]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
builtins.c (compute_objsize): Add an argument and set it to offset into destination.
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
28f4ec01 1/* Expand builtin functions.
a5544970 2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5
AM
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5
AM
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
957060b5 42#include "diagnostic-core.h"
40e23961 43#include "alias.h"
40e23961 44#include "fold-const.h"
5c1a2e63 45#include "fold-const-call.h"
cc8bea0a 46#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
d49b6e1e 51#include "realmpfr.h"
60393bbc 52#include "cfgrtl.h"
28f4ec01 53#include "except.h"
36566b39
PK
54#include "dojump.h"
55#include "explow.h"
36566b39 56#include "stmt.h"
28f4ec01 57#include "expr.h"
e78d8e51 58#include "libfuncs.h"
28f4ec01
BS
59#include "output.h"
60#include "typeclass.h"
ab393bf1 61#include "langhooks.h"
079a182e 62#include "value-prof.h"
fa19795e 63#include "builtins.h"
314e6352
ML
64#include "stringpool.h"
65#include "attribs.h"
bdea98ca 66#include "asan.h"
686ee971 67#include "internal-fn.h"
b03ff92e 68#include "case-cfn-macros.h"
44a845ca 69#include "gimple-fold.h"
ee92e7ba 70#include "intl.h"
7365279f 71#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
72#include "gomp-constants.h"
73#include "omp-general.h"
464969eb 74#include "tree-dfa.h"
81f5094d 75
fa19795e
RS
76struct target_builtins default_target_builtins;
77#if SWITCHABLE_TARGET
78struct target_builtins *this_target_builtins = &default_target_builtins;
79#endif
80
9df2c88c 81/* Define the names of the builtin function types and codes. */
5e351e96 82const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
c6a912da 85#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 86const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
87{
88#include "builtins.def"
89};
9df2c88c 90
cbf5d0e7 91/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 92 initialized to NULL_TREE. */
cbf5d0e7 93builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 94
4e7d7b3d
JJ
95/* Non-zero if __builtin_constant_p should be folded right away. */
96bool force_folding_builtin_constant_p;
97
4682ae04 98static int target_char_cast (tree, char *);
435bb2a1 99static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
100static int apply_args_size (void);
101static int apply_result_size (void);
4682ae04 102static rtx result_vector (int, rtx);
4682ae04
AJ
103static void expand_builtin_prefetch (tree);
104static rtx expand_builtin_apply_args (void);
105static rtx expand_builtin_apply_args_1 (void);
106static rtx expand_builtin_apply (rtx, rtx, rtx);
107static void expand_builtin_return (rtx);
108static enum type_class type_to_class (tree);
109static rtx expand_builtin_classify_type (tree);
6c7cf1f0 110static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 111static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 112static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 113static rtx expand_builtin_sincos (tree);
4359dc2a 114static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
115static rtx expand_builtin_int_roundingfn (tree, rtx);
116static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 117static rtx expand_builtin_next_arg (void);
4682ae04
AJ
118static rtx expand_builtin_va_start (tree);
119static rtx expand_builtin_va_end (tree);
120static rtx expand_builtin_va_copy (tree);
523a59ff 121static rtx inline_expand_builtin_string_cmp (tree, rtx);
44e10129 122static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 123static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 124static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 125static rtx expand_builtin_memchr (tree, rtx);
44e10129 126static rtx expand_builtin_memcpy (tree, rtx);
671a00ee 127static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
2ff5ffb6 128 rtx target, tree exp,
03a9b90a
AS
129 memop_ret retmode,
130 bool might_overlap);
e50d56a5 131static rtx expand_builtin_memmove (tree, rtx);
671a00ee 132static rtx expand_builtin_mempcpy (tree, rtx);
2ff5ffb6 133static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
b5338fb3 134static rtx expand_builtin_strcat (tree);
44e10129 135static rtx expand_builtin_strcpy (tree, rtx);
e08341bb 136static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
ef4bddc2 137static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 138static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 139static rtx expand_builtin_strncat (tree, rtx);
44e10129 140static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 141static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
142static rtx expand_builtin_memset (tree, rtx, machine_mode);
143static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 144static rtx expand_builtin_bzero (tree);
ef4bddc2 145static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 146static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 147static rtx expand_builtin_alloca (tree);
ef4bddc2 148static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 149static rtx expand_builtin_frame_address (tree, tree);
db3927fb 150static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 151static rtx expand_builtin_expect (tree, rtx);
1e9168b2 152static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
153static tree fold_builtin_constant_p (tree);
154static tree fold_builtin_classify_type (tree);
ab996409 155static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 156static tree fold_builtin_inf (location_t, tree, int);
db3927fb 157static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 158static bool validate_arg (const_tree, enum tree_code code);
4682ae04 159static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 160static rtx expand_builtin_signbit (tree, rtx);
db3927fb 161static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
162static tree fold_builtin_isascii (location_t, tree);
163static tree fold_builtin_toascii (location_t, tree);
164static tree fold_builtin_isdigit (location_t, tree);
165static tree fold_builtin_fabs (location_t, tree, tree);
166static tree fold_builtin_abs (location_t, tree, tree);
167static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 168 enum tree_code);
903c723b 169static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb 170
b5338fb3
MS
171static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
172static tree fold_builtin_strspn (location_t, tree, tree, tree);
173static tree fold_builtin_strcspn (location_t, tree, tree, tree);
6de9cd9a 174
10a0d495 175static rtx expand_builtin_object_size (tree);
ef4bddc2 176static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
177 enum built_in_function);
178static void maybe_emit_chk_warning (tree, enum built_in_function);
179static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 180static void maybe_emit_free_warning (tree);
5039610b 181static tree fold_builtin_object_size (tree, tree);
000ba23d 182
ad03a744 183unsigned HOST_WIDE_INT target_newline;
fef5a0d9 184unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
185static unsigned HOST_WIDE_INT target_c;
186static unsigned HOST_WIDE_INT target_s;
edd7ae68 187char target_percent_c[3];
fef5a0d9 188char target_percent_s[3];
ad03a744 189char target_percent_s_newline[4];
ea91f957 190static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 191static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 192static void expand_builtin_sync_synchronize (void);
10a0d495 193
d7f09764
DN
194/* Return true if NAME starts with __builtin_ or __sync_. */
195
0c1e7e42 196static bool
bbf7ce11 197is_builtin_name (const char *name)
48ae6c13 198{
48ae6c13
RH
199 if (strncmp (name, "__builtin_", 10) == 0)
200 return true;
201 if (strncmp (name, "__sync_", 7) == 0)
202 return true;
86951993
AM
203 if (strncmp (name, "__atomic_", 9) == 0)
204 return true;
48ae6c13
RH
205 return false;
206}
6de9cd9a 207
bbf7ce11
RAE
208/* Return true if NODE should be considered for inline expansion regardless
209 of the optimization level. This means whenever a function is invoked with
210 its "internal" name, which normally contains the prefix "__builtin". */
211
4cfe7a6c 212bool
bbf7ce11
RAE
213called_as_built_in (tree node)
214{
215 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
216 we want the name used to call the function, not the name it
217 will have. */
218 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
219 return is_builtin_name (name);
220}
221
644ffefd
MJ
222/* Compute values M and N such that M divides (address of EXP - N) and such
223 that N < M. If these numbers can be determined, store M in alignp and N in
224 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
225 *alignp and any bit-offset to *bitposp.
73f6eabc
RS
226
227 Note that the address (and thus the alignment) computed here is based
228 on the address to which a symbol resolves, whereas DECL_ALIGN is based
229 on the address at which an object is actually located. These two
230 addresses are not always the same. For example, on ARM targets,
231 the address &foo of a Thumb function foo() has the lowest bit set,
b0f4a35f 232 whereas foo() itself starts on an even address.
df96b059 233
b0f4a35f
RG
234 If ADDR_P is true we are taking the address of the memory reference EXP
235 and thus cannot rely on the access taking place. */
236
237static bool
238get_object_alignment_2 (tree exp, unsigned int *alignp,
239 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
df96b059 240{
7df9b6f1 241 poly_int64 bitsize, bitpos;
e80c2726 242 tree offset;
ef4bddc2 243 machine_mode mode;
ee45a32d 244 int unsignedp, reversep, volatilep;
eae76e53 245 unsigned int align = BITS_PER_UNIT;
644ffefd 246 bool known_alignment = false;
df96b059 247
e80c2726
RG
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
ee45a32d 250 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
25b75a48 251 &unsignedp, &reversep, &volatilep);
e80c2726
RG
252
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
b0f4a35f 255 if (TREE_CODE (exp) == FUNCTION_DECL)
73f6eabc 256 {
b0f4a35f
RG
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
262 align = 2 * BITS_PER_UNIT;
73f6eabc 263 }
b0f4a35f
RG
264 else if (TREE_CODE (exp) == LABEL_DECL)
265 ;
266 else if (TREE_CODE (exp) == CONST_DECL)
e80c2726 267 {
b0f4a35f
RG
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp = DECL_INITIAL (exp);
e80c2726 270 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 271 if (CONSTANT_CLASS_P (exp))
58e17cf8 272 align = targetm.constant_alignment (exp, align);
6b00e42d 273
b0f4a35f 274 known_alignment = true;
e80c2726 275 }
b0f4a35f 276 else if (DECL_P (exp))
644ffefd 277 {
b0f4a35f 278 align = DECL_ALIGN (exp);
644ffefd 279 known_alignment = true;
644ffefd 280 }
b0f4a35f
RG
281 else if (TREE_CODE (exp) == INDIRECT_REF
282 || TREE_CODE (exp) == MEM_REF
283 || TREE_CODE (exp) == TARGET_MEM_REF)
e80c2726
RG
284 {
285 tree addr = TREE_OPERAND (exp, 0);
644ffefd
MJ
286 unsigned ptr_align;
287 unsigned HOST_WIDE_INT ptr_bitpos;
4ceae7e9 288 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
644ffefd 289
4ceae7e9 290 /* If the address is explicitely aligned, handle that. */
e80c2726
RG
291 if (TREE_CODE (addr) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
293 {
4ceae7e9
RB
294 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
295 ptr_bitmask *= BITS_PER_UNIT;
146ec50f 296 align = least_bit_hwi (ptr_bitmask);
e80c2726
RG
297 addr = TREE_OPERAND (addr, 0);
298 }
644ffefd 299
b0f4a35f
RG
300 known_alignment
301 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
b0f4a35f
RG
302 align = MAX (ptr_align, align);
303
4ceae7e9
RB
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos &= ptr_bitmask;
306
3c82efd9
RG
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
b0f4a35f 309 if (TREE_CODE (exp) == TARGET_MEM_REF)
1be38ccb 310 {
b0f4a35f
RG
311 if (TMR_INDEX (exp))
312 {
313 unsigned HOST_WIDE_INT step = 1;
314 if (TMR_STEP (exp))
315 step = TREE_INT_CST_LOW (TMR_STEP (exp));
146ec50f 316 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
b0f4a35f
RG
317 }
318 if (TMR_INDEX2 (exp))
319 align = BITS_PER_UNIT;
320 known_alignment = false;
1be38ccb 321 }
644ffefd 322
b0f4a35f
RG
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
3c82efd9
RG
326 alignment knowledge and if using that alignment would
327 improve the situation. */
a4cf4b64 328 unsigned int talign;
3c82efd9 329 if (!addr_p && !known_alignment
a4cf4b64
RB
330 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
331 && talign > align)
332 align = talign;
3c82efd9
RG
333 else
334 {
335 /* Else adjust bitpos accordingly. */
336 bitpos += ptr_bitpos;
337 if (TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
aca52e6f 339 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
3c82efd9 340 }
e80c2726 341 }
b0f4a35f 342 else if (TREE_CODE (exp) == STRING_CST)
1be38ccb 343 {
b0f4a35f
RG
344 /* STRING_CST are the only constant objects we allow to be not
345 wrapped inside a CONST_DECL. */
346 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 347 if (CONSTANT_CLASS_P (exp))
58e17cf8 348 align = targetm.constant_alignment (exp, align);
6b00e42d 349
b0f4a35f 350 known_alignment = true;
e80c2726 351 }
e80c2726
RG
352
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
eae76e53 355 if (offset)
e80c2726 356 {
e75fde1a 357 unsigned int trailing_zeros = tree_ctz (offset);
eae76e53 358 if (trailing_zeros < HOST_BITS_PER_INT)
e80c2726 359 {
eae76e53
JJ
360 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
361 if (inner)
362 align = MIN (align, inner);
e80c2726 363 }
e80c2726
RG
364 }
365
7df9b6f1
RS
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
369 if (alt_align != 0 && alt_align < align)
370 {
371 align = alt_align;
372 known_alignment = false;
373 }
374
b0f4a35f 375 *alignp = align;
7df9b6f1 376 *bitposp = bitpos.coeffs[0] & (align - 1);
644ffefd 377 return known_alignment;
daade206
RG
378}
379
b0f4a35f
RG
380/* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in alignp and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
384
385bool
386get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
388{
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
390}
391
0eb77834 392/* Return the alignment in bits of EXP, an object. */
daade206
RG
393
394unsigned int
0eb77834 395get_object_alignment (tree exp)
daade206
RG
396{
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
399
644ffefd 400 get_object_alignment_1 (exp, &align, &bitpos);
daade206 401
e80c2726
RG
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
404
405 if (bitpos != 0)
146ec50f 406 align = least_bit_hwi (bitpos);
0eb77834 407 return align;
df96b059
JJ
408}
409
644ffefd
MJ
410/* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
b0f4a35f
RG
412 store M in alignp and N in *BITPOSP and return true. Return false if
413 the results are just a conservative approximation.
28f4ec01 414
644ffefd 415 If EXP is not a pointer, false is returned too. */
28f4ec01 416
644ffefd
MJ
417bool
418get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
28f4ec01 420{
1be38ccb 421 STRIP_NOPS (exp);
6026b73e 422
1be38ccb 423 if (TREE_CODE (exp) == ADDR_EXPR)
b0f4a35f
RG
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
5fa79de8
RB
426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
427 {
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
435 {
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
438 {
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
442 }
443 }
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
447 }
1be38ccb
RG
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
28f4ec01 450 {
644ffefd 451 unsigned int ptr_align, ptr_misalign;
1be38ccb 452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
644ffefd
MJ
453
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
455 {
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
5505978a
RB
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
b0f4a35f 462 /* We cannot really tell whether this result is an approximation. */
5f9a167b 463 return false;
644ffefd
MJ
464 }
465 else
87c0fb4b
RG
466 {
467 *bitposp = 0;
644ffefd
MJ
468 *alignp = BITS_PER_UNIT;
469 return false;
87c0fb4b 470 }
28f4ec01 471 }
44fabee4
RG
472 else if (TREE_CODE (exp) == INTEGER_CST)
473 {
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
478 }
1be38ccb 479
87c0fb4b 480 *bitposp = 0;
644ffefd
MJ
481 *alignp = BITS_PER_UNIT;
482 return false;
28f4ec01
BS
483}
484
87c0fb4b
RG
485/* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
488
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
491
492unsigned int
493get_pointer_alignment (tree exp)
494{
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
644ffefd
MJ
497
498 get_pointer_alignment_1 (exp, &align, &bitpos);
87c0fb4b
RG
499
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
502
503 if (bitpos != 0)
146ec50f 504 align = least_bit_hwi (bitpos);
87c0fb4b
RG
505
506 return align;
507}
508
bfb9bd47 509/* Return the number of leading non-zero elements in the sequence
1eb4547b
MS
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
512
bfb9bd47 513unsigned
1eb4547b
MS
514string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
515{
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
517
518 unsigned n;
519
520 if (eltsize == 1)
521 {
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
524 {
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
528 }
529 }
530 else
531 {
532 for (n = 0; n < maxelts; n++)
533 {
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
537 }
538 }
539 return n;
540}
541
6ab24ea8
MS
542/* For a call at LOC to a function FN that expects a string in the argument
543 ARG, issue a diagnostic due to it being a called with an argument
544 declared at NONSTR that is a character array with no terminating NUL. */
545
546void
547warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
548{
549 if (TREE_NO_WARNING (arg))
550 return;
551
552 loc = expansion_point_location_if_in_system_header (loc);
553
554 if (warning_at (loc, OPT_Wstringop_overflow_,
555 "%qs argument missing terminating nul", fn))
556 {
557 inform (DECL_SOURCE_LOCATION (decl),
558 "referenced argument declared here");
559 TREE_NO_WARNING (arg) = 1;
560 }
561}
562
b5338fb3
MS
563/* For a call EXPR (which may be null) that expects a string argument
564 and SRC as the argument, returns false if SRC is a character array
565 with no terminating NUL. When nonnull, BOUND is the number of
566 characters in which to expect the terminating NUL.
567 When EXPR is nonnull also issues a warning. */
568
569bool
570check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
571{
572 tree size;
573 bool exact;
574 tree nonstr = unterminated_array (src, &size, &exact);
575 if (!nonstr)
576 return true;
577
578 /* NONSTR refers to the non-nul terminated constant array and SIZE
579 is the constant size of the array in bytes. EXACT is true when
580 SIZE is exact. */
581
582 if (bound)
583 {
584 wide_int min, max;
585 if (TREE_CODE (bound) == INTEGER_CST)
586 min = max = wi::to_wide (bound);
587 else
588 {
589 value_range_kind rng = get_range_info (bound, &min, &max);
590 if (rng != VR_RANGE)
591 return true;
592 }
593
594 if (wi::leu_p (min, wi::to_wide (size)))
595 return true;
596 }
597
598 if (expr && !TREE_NO_WARNING (expr))
599 {
600 tree fndecl = get_callee_fndecl (expr);
601 const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
602 warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
603 }
604
605 return false;
606}
607
e08341bb
MS
608/* If EXP refers to an unterminated constant character array return
609 the declaration of the object of which the array is a member or
6c4aa5f6
MS
610 element and if SIZE is not null, set *SIZE to the size of
611 the unterminated array and set *EXACT if the size is exact or
612 clear it otherwise. Otherwise return null. */
e08341bb 613
01b0acb7 614tree
6c4aa5f6 615unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
e08341bb 616{
6c4aa5f6
MS
617 /* C_STRLEN will return NULL and set DECL in the info
618 structure if EXP references a unterminated array. */
e09aa5bd
MS
619 c_strlen_data lendata = { };
620 tree len = c_strlen (exp, 1, &lendata);
b71bbbe2 621 if (len == NULL_TREE && lendata.minlen && lendata.decl)
6c4aa5f6
MS
622 {
623 if (size)
624 {
b71bbbe2 625 len = lendata.minlen;
e09aa5bd 626 if (lendata.off)
6c4aa5f6 627 {
e09aa5bd
MS
628 /* Constant offsets are already accounted for in LENDATA.MINLEN,
629 but not in a SSA_NAME + CST expression. */
630 if (TREE_CODE (lendata.off) == INTEGER_CST)
6c4aa5f6 631 *exact = true;
e09aa5bd
MS
632 else if (TREE_CODE (lendata.off) == PLUS_EXPR
633 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
6c4aa5f6
MS
634 {
635 /* Subtract the offset from the size of the array. */
636 *exact = false;
e09aa5bd 637 tree temp = TREE_OPERAND (lendata.off, 1);
6c4aa5f6
MS
638 temp = fold_convert (ssizetype, temp);
639 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
640 }
641 else
642 *exact = false;
643 }
644 else
645 *exact = true;
646
647 *size = len;
648 }
e09aa5bd 649 return lendata.decl;
6c4aa5f6
MS
650 }
651
652 return NULL_TREE;
e08341bb
MS
653}
654
1eb4547b
MS
655/* Compute the length of a null-terminated character string or wide
656 character string handling character sizes of 1, 2, and 4 bytes.
657 TREE_STRING_LENGTH is not the right way because it evaluates to
658 the size of the character array in bytes (as opposed to characters)
659 and because it can contain a zero byte in the middle.
28f4ec01 660
f1ba665b 661 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 662 into the instruction stream and zero if it is going to be expanded.
f1ba665b 663 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627 664 is returned, otherwise NULL, since
14b7950f 665 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
ae808627
JJ
666 evaluate the side-effects.
667
21e8fb22
RB
668 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
669 accesses. Note that this implies the result is not going to be emitted
670 into the instruction stream.
671
7d583f42 672 Additional information about the string accessed may be recorded
14b7950f 673 in DATA. For example, if ARG references an unterminated string,
7d583f42
JL
674 then the declaration will be stored in the DECL field. If the
675 length of the unterminated string can be determined, it'll be
676 stored in the LEN field. Note this length could well be different
677 than what a C strlen call would return.
6ab24ea8 678
4148b00d
BE
679 ELTSIZE is 1 for normal single byte character strings, and 2 or
680 4 for wide characer strings. ELTSIZE is by default 1.
fed3cef0 681
4148b00d 682 The value returned is of type `ssizetype'. */
28f4ec01 683
6de9cd9a 684tree
14b7950f 685c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
28f4ec01 686{
7d583f42
JL
687 /* If we were not passed a DATA pointer, then get one to a local
688 structure. That avoids having to check DATA for NULL before
689 each time we want to use it. */
3f46ef1f 690 c_strlen_data local_strlen_data = { };
7d583f42
JL
691 if (!data)
692 data = &local_strlen_data;
693
1ebf0641 694 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
14b7950f
MS
695
696 tree src = STRIP_NOPS (arg);
ae808627
JJ
697 if (TREE_CODE (src) == COND_EXPR
698 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
699 {
700 tree len1, len2;
701
7d583f42
JL
702 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
703 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
33521f7d 704 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
705 return len1;
706 }
707
708 if (TREE_CODE (src) == COMPOUND_EXPR
709 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
7d583f42 710 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
ae808627 711
1eb4547b 712 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 713
1eb4547b
MS
714 /* Offset from the beginning of the string in bytes. */
715 tree byteoff;
4148b00d 716 tree memsize;
6ab24ea8
MS
717 tree decl;
718 src = string_constant (src, &byteoff, &memsize, &decl);
28f4ec01 719 if (src == 0)
5039610b 720 return NULL_TREE;
fed3cef0 721
1eb4547b 722 /* Determine the size of the string element. */
4148b00d
BE
723 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
724 return NULL_TREE;
1eb4547b
MS
725
726 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
35b4d3a6 727 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
bfb9bd47
MS
728 in case the latter is less than the size of the array, such as when
729 SRC refers to a short string literal used to initialize a large array.
730 In that case, the elements of the array after the terminating NUL are
731 all NUL. */
732 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
d01b568a 733 strelts = strelts / eltsize;
bfb9bd47 734
4148b00d
BE
735 if (!tree_fits_uhwi_p (memsize))
736 return NULL_TREE;
737
d01b568a 738 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1eb4547b
MS
739
740 /* PTR can point to the byte representation of any string type, including
741 char* and wchar_t*. */
742 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 743
1eb4547b 744 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01 745 {
4148b00d
BE
746 /* The code below works only for single byte character types. */
747 if (eltsize != 1)
748 return NULL_TREE;
749
bfb9bd47
MS
750 /* If the string has an internal NUL character followed by any
751 non-NUL characters (e.g., "foo\0bar"), we can't compute
752 the offset to the following NUL if we don't know where to
28f4ec01 753 start searching for it. */
bfb9bd47 754 unsigned len = string_length (ptr, eltsize, strelts);
fed3cef0 755
7d583f42
JL
756 /* Return when an embedded null character is found or none at all.
757 In the latter case, set the DECL/LEN field in the DATA structure
758 so that callers may examine them. */
6ab24ea8 759 if (len + 1 < strelts)
4148b00d 760 return NULL_TREE;
6ab24ea8
MS
761 else if (len >= maxelts)
762 {
7d583f42 763 data->decl = decl;
6c4aa5f6 764 data->off = byteoff;
b71bbbe2 765 data->minlen = ssize_int (len);
6ab24ea8
MS
766 return NULL_TREE;
767 }
c42d0aa0 768
d01b568a
BE
769 /* For empty strings the result should be zero. */
770 if (len == 0)
771 return ssize_int (0);
772
28f4ec01 773 /* We don't know the starting offset, but we do know that the string
bfb9bd47
MS
774 has no internal zero bytes. If the offset falls within the bounds
775 of the string subtract the offset from the length of the string,
776 and return that. Otherwise the length is zero. Take care to
777 use SAVE_EXPR in case the OFFSET has side-effects. */
e8bf3d5e
BE
778 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
779 : byteoff;
780 offsave = fold_convert_loc (loc, sizetype, offsave);
bfb9bd47 781 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
e8bf3d5e
BE
782 size_int (len));
783 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
784 offsave);
785 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
bfb9bd47
MS
786 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
787 build_zero_cst (ssizetype));
28f4ec01
BS
788 }
789
1eb4547b
MS
790 /* Offset from the beginning of the string in elements. */
791 HOST_WIDE_INT eltoff;
792
28f4ec01 793 /* We have a known offset into the string. Start searching there for
5197bd50 794 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
795 if (byteoff == 0)
796 eltoff = 0;
1ebf0641 797 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1eb4547b 798 eltoff = -1;
28f4ec01 799 else
1ebf0641 800 eltoff = tree_to_uhwi (byteoff) / eltsize;
fed3cef0 801
b2ed71b6
BE
802 /* If the offset is known to be out of bounds, warn, and call strlen at
803 runtime. */
d01b568a 804 if (eltoff < 0 || eltoff >= maxelts)
28f4ec01 805 {
1db01ff9 806 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81 807 if (only_value != 2
14b7950f 808 && !TREE_NO_WARNING (arg)
1db01ff9
JJ
809 && warning_at (loc, OPT_Warray_bounds,
810 "offset %qwi outside bounds of constant string",
811 eltoff))
14b7950f
MS
812 {
813 if (decl)
814 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
815 TREE_NO_WARNING (arg) = 1;
816 }
5039610b 817 return NULL_TREE;
28f4ec01 818 }
fed3cef0 819
4148b00d
BE
820 /* If eltoff is larger than strelts but less than maxelts the
821 string length is zero, since the excess memory will be zero. */
822 if (eltoff > strelts)
823 return ssize_int (0);
824
28f4ec01
BS
825 /* Use strlen to search for the first zero byte. Since any strings
826 constructed with build_string will have nulls appended, we win even
827 if we get handed something like (char[4])"abcd".
828
1eb4547b 829 Since ELTOFF is our starting index into the string, no further
28f4ec01 830 calculation is needed. */
1eb4547b 831 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
4148b00d 832 strelts - eltoff);
1eb4547b 833
d01b568a 834 /* Don't know what to return if there was no zero termination.
7d583f42
JL
835 Ideally this would turn into a gcc_checking_assert over time.
836 Set DECL/LEN so callers can examine them. */
d01b568a 837 if (len >= maxelts - eltoff)
6ab24ea8 838 {
7d583f42 839 data->decl = decl;
6c4aa5f6 840 data->off = byteoff;
b71bbbe2 841 data->minlen = ssize_int (len);
6ab24ea8
MS
842 return NULL_TREE;
843 }
1ebf0641 844
1eb4547b 845 return ssize_int (len);
28f4ec01
BS
846}
847
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  /* Accumulate the value one HOST_WIDE_INT element at a time.  */
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of HOST_WIDE_INT elements needed to hold MODE's precision,
     rounded up.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  /* CH doubles as the "still inside the string" flag: once it becomes 0
     (the terminating NUL) and NULL_TERMINATED_P holds, all further bytes
     read as zero without touching STR.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map source byte index I to the target bit position J, honoring
	 both word and byte endianness of the target.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
889
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return nonzero on failure (non-constant CST, or a value that does
   not survive the round trip through the target/host char widths).  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  /* Truncate to the target's char width.  */
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two truncations
     disagree, the value cannot be represented in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
919
6de9cd9a
DN
920/* Similar to save_expr, but assumes that arbitrary code is not executed
921 in between the multiple evaluations. In particular, we assume that a
922 non-addressable local variable will not be modified. */
923
924static tree
925builtin_save_expr (tree exp)
926{
5cbf5c20
RG
927 if (TREE_CODE (exp) == SSA_NAME
928 || (TREE_ADDRESSABLE (exp) == 0
929 && (TREE_CODE (exp) == PARM_DECL
8813a647 930 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
931 return exp;
932
933 return save_expr (exp);
934}
935
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one pointer-sized word past the
     frame address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
1003
/* Alias set used for setjmp buffer.  Lazily created (-1 means "not yet
   allocated") by the setjmp/longjmp expanders below.  */
static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 1006
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  /* Word 0: the hard frame pointer.  */
  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  /* Word 1: the receiver label.  NOTE(review): the trailing comma below
     makes these two statements a single comma expression; behavior is the
     same as with a semicolon.  */
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  /* Word 2 onward: machine-dependent stack save area.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
28f4ec01 1053
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Prefer the target's setjmp receiver pattern; fall back to its
     nonlocal goto receiver, or to nothing at all.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
28f4ec01 1109
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: word 0 is the
	 frame pointer, word 1 the receiver label, word 2 the saved SP.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1202
862d0b35
DN
1203static inline bool
1204more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1205{
1206 return (iter->i < iter->n);
1207}
1208
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  /* ARGNO is 1-based to match the numbering used by attribute nonnull.  */
  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
1278
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 is the frame pointer, word 1 the stack
     pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1366
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  /* The stack save area sits two pointer-sized words into the buffer,
     matching the layout written by expand_builtin_setjmp_setup.  */
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
1386
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1464
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Alias set 0 makes the MEM conflict with everything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1519\f
/* Built-in functions to perform an untyped call and return.  */

/* Per-target tables mapping each hard register to the mode used to save
   it for __builtin_apply_args / __builtin_apply (VOIDmode when the
   register is not saved); accessed via the target-specific builtins
   state.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
28f4ec01 1526
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* Then one slot per register that can carry function arguments,
	 each aligned to its mode's alignment.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1568
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* One slot per register that can hold a function return value,
	 each aligned to its mode's alignment.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1607
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  /* Walk the layout computed by apply_result_size, emitting one SET per
     saved register: register -> memory when saving, memory -> register
     when restoring.  */
  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1636
1637/* Save the state required to perform an untyped call with the same
1638 arguments as were passed to the current function. */
1639
1640static rtx
4682ae04 1641expand_builtin_apply_args_1 (void)
28f4ec01 1642{
88e541e1 1643 rtx registers, tem;
28f4ec01 1644 int size, align, regno;
b660eccf 1645 fixed_size_mode mode;
92f6864c 1646 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
28f4ec01
BS
1647
1648 /* Create a block where the arg-pointer, structure value address,
1649 and argument registers can be saved. */
1650 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1651
1652 /* Walk past the arg-pointer and structure value address. */
1653 size = GET_MODE_SIZE (Pmode);
92f6864c 1654 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1655 size += GET_MODE_SIZE (Pmode);
1656
1657 /* Save each register used in calling a function to the block. */
1658 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1659 if ((mode = apply_args_mode[regno]) != VOIDmode)
1660 {
28f4ec01
BS
1661 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1662 if (size % align != 0)
1663 size = CEIL (size, align) * align;
1664
1665 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1666
f4ef873c 1667 emit_move_insn (adjust_address (registers, mode, size), tem);
28f4ec01
BS
1668 size += GET_MODE_SIZE (mode);
1669 }
1670
1671 /* Save the arg pointer to the block. */
2e3f842f 1672 tem = copy_to_reg (crtl->args.internal_arg_pointer);
88e541e1 1673 /* We need the pointer as the caller actually passed them to us, not
ac3f5df7
HPN
1674 as we might have pretended they were passed. Make sure it's a valid
1675 operand, as emit_move_insn isn't expected to handle a PLUS. */
581edfa3
TS
1676 if (STACK_GROWS_DOWNWARD)
1677 tem
1678 = force_operand (plus_constant (Pmode, tem,
1679 crtl->args.pretend_args_size),
1680 NULL_RTX);
88e541e1 1681 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
33521f7d 1682
28f4ec01
BS
1683 size = GET_MODE_SIZE (Pmode);
1684
1685 /* Save the structure value address unless this is passed as an
1686 "invisible" first argument. */
61f71b34 1687 if (struct_incoming_value)
45309d28
ML
1688 emit_move_insn (adjust_address (registers, Pmode, size),
1689 copy_to_reg (struct_incoming_value));
28f4ec01
BS
1690
1691 /* Return the address of the block. */
1692 return copy_addr_to_reg (XEXP (registers, 0));
1693}
1694
1695/* __builtin_apply_args returns block of memory allocated on
1696 the stack into which is stored the arg pointer, structure
1697 value address, static chain, and all the registers that might
1698 possibly be used in performing a function call. The code is
1699 moved to the start of the function so the incoming values are
1700 saved. */
5197bd50 1701
28f4ec01 1702static rtx
4682ae04 1703expand_builtin_apply_args (void)
28f4ec01
BS
1704{
1705 /* Don't do __builtin_apply_args more than once in a function.
1706 Save the result of the first call and reuse it. */
1707 if (apply_args_value != 0)
1708 return apply_args_value;
1709 {
1710 /* When this function is called, it means that registers must be
1711 saved on entry to this function. So we migrate the
1712 call to the first insn of this function. */
1713 rtx temp;
28f4ec01
BS
1714
1715 start_sequence ();
1716 temp = expand_builtin_apply_args_1 ();
e67d1102 1717 rtx_insn *seq = get_insns ();
28f4ec01
BS
1718 end_sequence ();
1719
1720 apply_args_value = temp;
1721
2f937369
DM
1722 /* Put the insns after the NOTE that starts the function.
1723 If this is inside a start_sequence, make the outer-level insn
28f4ec01 1724 chain current, so the code is placed at the start of the
1f21b6f4
JJ
1725 function. If internal_arg_pointer is a non-virtual pseudo,
1726 it needs to be placed after the function that initializes
1727 that pseudo. */
28f4ec01 1728 push_topmost_sequence ();
1f21b6f4
JJ
1729 if (REG_P (crtl->args.internal_arg_pointer)
1730 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1731 emit_insn_before (seq, parm_birth_insn);
1732 else
1733 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
28f4ec01
BS
1734 pop_topmost_sequence ();
1735 return temp;
1736 }
1737}
1738
1739/* Perform an untyped call and save the state required to perform an
1740 untyped return of whatever value was returned by the given function. */
1741
1742static rtx
4682ae04 1743expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
28f4ec01
BS
1744{
1745 int size, align, regno;
b660eccf 1746 fixed_size_mode mode;
58f4cf2a
DM
1747 rtx incoming_args, result, reg, dest, src;
1748 rtx_call_insn *call_insn;
28f4ec01
BS
1749 rtx old_stack_level = 0;
1750 rtx call_fusage = 0;
92f6864c 1751 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
28f4ec01 1752
5ae6cd0d 1753 arguments = convert_memory_address (Pmode, arguments);
ce2d32cd 1754
28f4ec01
BS
1755 /* Create a block where the return registers can be saved. */
1756 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1757
28f4ec01
BS
1758 /* Fetch the arg pointer from the ARGUMENTS block. */
1759 incoming_args = gen_reg_rtx (Pmode);
ce2d32cd 1760 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
581edfa3
TS
1761 if (!STACK_GROWS_DOWNWARD)
1762 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1763 incoming_args, 0, OPTAB_LIB_WIDEN);
28f4ec01 1764
9d53e585
JM
1765 /* Push a new argument block and copy the arguments. Do not allow
1766 the (potential) memcpy call below to interfere with our stack
1767 manipulations. */
28f4ec01 1768 do_pending_stack_adjust ();
9d53e585 1769 NO_DEFER_POP;
28f4ec01 1770
f9da5064 1771 /* Save the stack with nonlocal if available. */
4476e1a0 1772 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 1773 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
28f4ec01 1774 else
9eac0f2a 1775 emit_stack_save (SAVE_BLOCK, &old_stack_level);
28f4ec01 1776
316d0b19 1777 /* Allocate a block of memory onto the stack and copy the memory
d3c12306
EB
1778 arguments to the outgoing arguments address. We can pass TRUE
1779 as the 4th argument because we just saved the stack pointer
1780 and will restore it right after the call. */
9e878cf1 1781 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2e3f842f
L
1782
1783 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1784 may have already set current_function_calls_alloca to true.
1785 current_function_calls_alloca won't be set if argsize is zero,
1786 so we have to guarantee need_drap is true here. */
1787 if (SUPPORTS_STACK_ALIGNMENT)
1788 crtl->need_drap = true;
1789
316d0b19 1790 dest = virtual_outgoing_args_rtx;
581edfa3
TS
1791 if (!STACK_GROWS_DOWNWARD)
1792 {
1793 if (CONST_INT_P (argsize))
1794 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1795 else
1796 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1797 }
8ac61af7
RK
1798 dest = gen_rtx_MEM (BLKmode, dest);
1799 set_mem_align (dest, PARM_BOUNDARY);
1800 src = gen_rtx_MEM (BLKmode, incoming_args);
1801 set_mem_align (src, PARM_BOUNDARY);
44bb111a 1802 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
28f4ec01
BS
1803
1804 /* Refer to the argument block. */
1805 apply_args_size ();
1806 arguments = gen_rtx_MEM (BLKmode, arguments);
8ac61af7 1807 set_mem_align (arguments, PARM_BOUNDARY);
28f4ec01
BS
1808
1809 /* Walk past the arg-pointer and structure value address. */
1810 size = GET_MODE_SIZE (Pmode);
61f71b34 1811 if (struct_value)
28f4ec01
BS
1812 size += GET_MODE_SIZE (Pmode);
1813
1814 /* Restore each of the registers previously saved. Make USE insns
1815 for each of these registers for use in making the call. */
1816 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1817 if ((mode = apply_args_mode[regno]) != VOIDmode)
1818 {
1819 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1820 if (size % align != 0)
1821 size = CEIL (size, align) * align;
1822 reg = gen_rtx_REG (mode, regno);
f4ef873c 1823 emit_move_insn (reg, adjust_address (arguments, mode, size));
28f4ec01
BS
1824 use_reg (&call_fusage, reg);
1825 size += GET_MODE_SIZE (mode);
1826 }
1827
1828 /* Restore the structure value address unless this is passed as an
1829 "invisible" first argument. */
1830 size = GET_MODE_SIZE (Pmode);
61f71b34 1831 if (struct_value)
28f4ec01
BS
1832 {
1833 rtx value = gen_reg_rtx (Pmode);
f4ef873c 1834 emit_move_insn (value, adjust_address (arguments, Pmode, size));
61f71b34 1835 emit_move_insn (struct_value, value);
f8cfc6aa 1836 if (REG_P (struct_value))
61f71b34 1837 use_reg (&call_fusage, struct_value);
28f4ec01
BS
1838 }
1839
1840 /* All arguments and registers used for the call are set up by now! */
531ca746 1841 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
28f4ec01
BS
1842
1843 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1844 and we don't want to load it into a register as an optimization,
1845 because prepare_call_address already did it if it should be done. */
1846 if (GET_CODE (function) != SYMBOL_REF)
1847 function = memory_address (FUNCTION_MODE, function);
1848
1849 /* Generate the actual call instruction and save the return value. */
43c7dca8
RS
1850 if (targetm.have_untyped_call ())
1851 {
1852 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1853 emit_call_insn (targetm.gen_untyped_call (mem, result,
1854 result_vector (1, result)));
1855 }
58d745ec 1856 else if (targetm.have_call_value ())
28f4ec01
BS
1857 {
1858 rtx valreg = 0;
1859
1860 /* Locate the unique return register. It is not possible to
1861 express a call that sets more than one return register using
1862 call_value; use untyped_call for that. In fact, untyped_call
1863 only needs to save the return registers in the given block. */
1864 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1865 if ((mode = apply_result_mode[regno]) != VOIDmode)
1866 {
58d745ec 1867 gcc_assert (!valreg); /* have_untyped_call required. */
5906d013 1868
28f4ec01
BS
1869 valreg = gen_rtx_REG (mode, regno);
1870 }
1871
58d745ec
RS
1872 emit_insn (targetm.gen_call_value (valreg,
1873 gen_rtx_MEM (FUNCTION_MODE, function),
1874 const0_rtx, NULL_RTX, const0_rtx));
28f4ec01 1875
f4ef873c 1876 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
28f4ec01
BS
1877 }
1878 else
298e6adc 1879 gcc_unreachable ();
28f4ec01 1880
ee960939
OH
1881 /* Find the CALL insn we just emitted, and attach the register usage
1882 information. */
1883 call_insn = last_call_insn ();
1884 add_function_usage_to (call_insn, call_fusage);
28f4ec01
BS
1885
1886 /* Restore the stack. */
4476e1a0 1887 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 1888 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
28f4ec01 1889 else
9eac0f2a 1890 emit_stack_restore (SAVE_BLOCK, old_stack_level);
c3284718 1891 fixup_args_size_notes (call_insn, get_last_insn (), 0);
28f4ec01 1892
9d53e585
JM
1893 OK_DEFER_POP;
1894
28f4ec01 1895 /* Return the address of the result block. */
5ae6cd0d
MM
1896 result = copy_addr_to_reg (XEXP (result, 0));
1897 return convert_memory_address (ptr_mode, result);
28f4ec01
BS
1898}
1899
1900/* Perform an untyped return. */
1901
1902static void
4682ae04 1903expand_builtin_return (rtx result)
28f4ec01
BS
1904{
1905 int size, align, regno;
b660eccf 1906 fixed_size_mode mode;
28f4ec01 1907 rtx reg;
fee3e72c 1908 rtx_insn *call_fusage = 0;
28f4ec01 1909
5ae6cd0d 1910 result = convert_memory_address (Pmode, result);
ce2d32cd 1911
28f4ec01
BS
1912 apply_result_size ();
1913 result = gen_rtx_MEM (BLKmode, result);
1914
43c7dca8 1915 if (targetm.have_untyped_return ())
28f4ec01 1916 {
43c7dca8
RS
1917 rtx vector = result_vector (0, result);
1918 emit_jump_insn (targetm.gen_untyped_return (result, vector));
28f4ec01
BS
1919 emit_barrier ();
1920 return;
1921 }
28f4ec01
BS
1922
1923 /* Restore the return value and note that each value is used. */
1924 size = 0;
1925 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1926 if ((mode = apply_result_mode[regno]) != VOIDmode)
1927 {
1928 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1929 if (size % align != 0)
1930 size = CEIL (size, align) * align;
1931 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
f4ef873c 1932 emit_move_insn (reg, adjust_address (result, mode, size));
28f4ec01
BS
1933
1934 push_to_sequence (call_fusage);
c41c1387 1935 emit_use (reg);
28f4ec01
BS
1936 call_fusage = get_insns ();
1937 end_sequence ();
1938 size += GET_MODE_SIZE (mode);
1939 }
1940
1941 /* Put the USE insns before the return. */
2f937369 1942 emit_insn (call_fusage);
28f4ec01
BS
1943
1944 /* Return whatever values was restored by jumping directly to the end
1945 of the function. */
6e3077c6 1946 expand_naked_return ();
28f4ec01
BS
1947}
1948
ad82abb8 1949/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1950
ad82abb8 1951static enum type_class
4682ae04 1952type_to_class (tree type)
ad82abb8
ZW
1953{
1954 switch (TREE_CODE (type))
1955 {
1956 case VOID_TYPE: return void_type_class;
1957 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1958 case ENUMERAL_TYPE: return enumeral_type_class;
1959 case BOOLEAN_TYPE: return boolean_type_class;
1960 case POINTER_TYPE: return pointer_type_class;
1961 case REFERENCE_TYPE: return reference_type_class;
1962 case OFFSET_TYPE: return offset_type_class;
1963 case REAL_TYPE: return real_type_class;
1964 case COMPLEX_TYPE: return complex_type_class;
1965 case FUNCTION_TYPE: return function_type_class;
1966 case METHOD_TYPE: return method_type_class;
1967 case RECORD_TYPE: return record_type_class;
1968 case UNION_TYPE:
1969 case QUAL_UNION_TYPE: return union_type_class;
1970 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1971 ? string_type_class : array_type_class);
ad82abb8
ZW
1972 case LANG_TYPE: return lang_type_class;
1973 default: return no_type_class;
1974 }
1975}
8d51ecf8 1976
5039610b 1977/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1978
28f4ec01 1979static rtx
5039610b 1980expand_builtin_classify_type (tree exp)
28f4ec01 1981{
5039610b
SL
1982 if (call_expr_nargs (exp))
1983 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1984 return GEN_INT (no_type_class);
1985}
1986
ee5fd23a
MM
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 2014
5c1a2e63
RS
2015/* Return a function equivalent to FN but operating on floating-point
2016 values of type TYPE, or END_BUILTINS if no such function exists.
b03ff92e
RS
2017 This is purely an operation on function codes; it does not guarantee
2018 that the target actually has an implementation of the function. */
05f41289 2019
5c1a2e63 2020static built_in_function
b03ff92e 2021mathfn_built_in_2 (tree type, combined_fn fn)
272f51a3 2022{
ee5fd23a 2023 tree mtype;
5c1a2e63 2024 built_in_function fcode, fcodef, fcodel;
ee5fd23a
MM
2025 built_in_function fcodef16 = END_BUILTINS;
2026 built_in_function fcodef32 = END_BUILTINS;
2027 built_in_function fcodef64 = END_BUILTINS;
2028 built_in_function fcodef128 = END_BUILTINS;
2029 built_in_function fcodef32x = END_BUILTINS;
2030 built_in_function fcodef64x = END_BUILTINS;
2031 built_in_function fcodef128x = END_BUILTINS;
daa027cc
KG
2032
2033 switch (fn)
2034 {
b03ff92e
RS
2035 CASE_MATHFN (ACOS)
2036 CASE_MATHFN (ACOSH)
2037 CASE_MATHFN (ASIN)
2038 CASE_MATHFN (ASINH)
2039 CASE_MATHFN (ATAN)
2040 CASE_MATHFN (ATAN2)
2041 CASE_MATHFN (ATANH)
2042 CASE_MATHFN (CBRT)
c6cfa2bf 2043 CASE_MATHFN_FLOATN (CEIL)
b03ff92e 2044 CASE_MATHFN (CEXPI)
ee5fd23a 2045 CASE_MATHFN_FLOATN (COPYSIGN)
b03ff92e
RS
2046 CASE_MATHFN (COS)
2047 CASE_MATHFN (COSH)
2048 CASE_MATHFN (DREM)
2049 CASE_MATHFN (ERF)
2050 CASE_MATHFN (ERFC)
2051 CASE_MATHFN (EXP)
2052 CASE_MATHFN (EXP10)
2053 CASE_MATHFN (EXP2)
2054 CASE_MATHFN (EXPM1)
2055 CASE_MATHFN (FABS)
2056 CASE_MATHFN (FDIM)
c6cfa2bf 2057 CASE_MATHFN_FLOATN (FLOOR)
ee5fd23a
MM
2058 CASE_MATHFN_FLOATN (FMA)
2059 CASE_MATHFN_FLOATN (FMAX)
2060 CASE_MATHFN_FLOATN (FMIN)
b03ff92e
RS
2061 CASE_MATHFN (FMOD)
2062 CASE_MATHFN (FREXP)
2063 CASE_MATHFN (GAMMA)
2064 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2065 CASE_MATHFN (HUGE_VAL)
2066 CASE_MATHFN (HYPOT)
2067 CASE_MATHFN (ILOGB)
2068 CASE_MATHFN (ICEIL)
2069 CASE_MATHFN (IFLOOR)
2070 CASE_MATHFN (INF)
2071 CASE_MATHFN (IRINT)
2072 CASE_MATHFN (IROUND)
2073 CASE_MATHFN (ISINF)
2074 CASE_MATHFN (J0)
2075 CASE_MATHFN (J1)
2076 CASE_MATHFN (JN)
2077 CASE_MATHFN (LCEIL)
2078 CASE_MATHFN (LDEXP)
2079 CASE_MATHFN (LFLOOR)
2080 CASE_MATHFN (LGAMMA)
2081 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2082 CASE_MATHFN (LLCEIL)
2083 CASE_MATHFN (LLFLOOR)
2084 CASE_MATHFN (LLRINT)
2085 CASE_MATHFN (LLROUND)
2086 CASE_MATHFN (LOG)
2087 CASE_MATHFN (LOG10)
2088 CASE_MATHFN (LOG1P)
2089 CASE_MATHFN (LOG2)
2090 CASE_MATHFN (LOGB)
2091 CASE_MATHFN (LRINT)
2092 CASE_MATHFN (LROUND)
2093 CASE_MATHFN (MODF)
2094 CASE_MATHFN (NAN)
2095 CASE_MATHFN (NANS)
c6cfa2bf 2096 CASE_MATHFN_FLOATN (NEARBYINT)
b03ff92e
RS
2097 CASE_MATHFN (NEXTAFTER)
2098 CASE_MATHFN (NEXTTOWARD)
2099 CASE_MATHFN (POW)
2100 CASE_MATHFN (POWI)
2101 CASE_MATHFN (POW10)
2102 CASE_MATHFN (REMAINDER)
2103 CASE_MATHFN (REMQUO)
c6cfa2bf
MM
2104 CASE_MATHFN_FLOATN (RINT)
2105 CASE_MATHFN_FLOATN (ROUND)
7d7b99f9 2106 CASE_MATHFN_FLOATN (ROUNDEVEN)
b03ff92e
RS
2107 CASE_MATHFN (SCALB)
2108 CASE_MATHFN (SCALBLN)
2109 CASE_MATHFN (SCALBN)
2110 CASE_MATHFN (SIGNBIT)
2111 CASE_MATHFN (SIGNIFICAND)
2112 CASE_MATHFN (SIN)
2113 CASE_MATHFN (SINCOS)
2114 CASE_MATHFN (SINH)
ee5fd23a 2115 CASE_MATHFN_FLOATN (SQRT)
b03ff92e
RS
2116 CASE_MATHFN (TAN)
2117 CASE_MATHFN (TANH)
2118 CASE_MATHFN (TGAMMA)
c6cfa2bf 2119 CASE_MATHFN_FLOATN (TRUNC)
b03ff92e
RS
2120 CASE_MATHFN (Y0)
2121 CASE_MATHFN (Y1)
2122 CASE_MATHFN (YN)
daa027cc 2123
b03ff92e
RS
2124 default:
2125 return END_BUILTINS;
2126 }
daa027cc 2127
ee5fd23a
MM
2128 mtype = TYPE_MAIN_VARIANT (type);
2129 if (mtype == double_type_node)
5c1a2e63 2130 return fcode;
ee5fd23a 2131 else if (mtype == float_type_node)
5c1a2e63 2132 return fcodef;
ee5fd23a 2133 else if (mtype == long_double_type_node)
5c1a2e63 2134 return fcodel;
ee5fd23a
MM
2135 else if (mtype == float16_type_node)
2136 return fcodef16;
2137 else if (mtype == float32_type_node)
2138 return fcodef32;
2139 else if (mtype == float64_type_node)
2140 return fcodef64;
2141 else if (mtype == float128_type_node)
2142 return fcodef128;
2143 else if (mtype == float32x_type_node)
2144 return fcodef32x;
2145 else if (mtype == float64x_type_node)
2146 return fcodef64x;
2147 else if (mtype == float128x_type_node)
2148 return fcodef128x;
daa027cc 2149 else
5c1a2e63
RS
2150 return END_BUILTINS;
2151}
2152
2153/* Return mathematic function equivalent to FN but operating directly on TYPE,
2154 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2155 otherwise use the explicit declaration. If we can't do the conversion,
2156 return null. */
2157
2158static tree
b03ff92e 2159mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2160{
2161 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2162 if (fcode2 == END_BUILTINS)
5039610b 2163 return NULL_TREE;
e79983f4
MM
2164
2165 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2166 return NULL_TREE;
2167
2168 return builtin_decl_explicit (fcode2);
272f51a3
JH
2169}
2170
b03ff92e 2171/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2172
2173tree
b03ff92e 2174mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2175{
2176 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2177}
2178
b03ff92e
RS
2179/* Like mathfn_built_in_1, but take a built_in_function and
2180 always use the implicit array. */
2181
2182tree
2183mathfn_built_in (tree type, enum built_in_function fn)
2184{
2185 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2186}
2187
686ee971
RS
2188/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2189 return its code, otherwise return IFN_LAST. Note that this function
2190 only tests whether the function is defined in internals.def, not whether
2191 it is actually available on the target. */
2192
2193internal_fn
2194associated_internal_fn (tree fndecl)
2195{
2196 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2197 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2198 switch (DECL_FUNCTION_CODE (fndecl))
2199 {
2200#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2201 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
ee5fd23a
MM
2202#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2203 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2204 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
4959a752
RS
2205#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2206 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
686ee971
RS
2207#include "internal-fn.def"
2208
2209 CASE_FLT_FN (BUILT_IN_POW10):
2210 return IFN_EXP10;
2211
2212 CASE_FLT_FN (BUILT_IN_DREM):
2213 return IFN_REMAINDER;
2214
2215 CASE_FLT_FN (BUILT_IN_SCALBN):
2216 CASE_FLT_FN (BUILT_IN_SCALBLN):
2217 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2218 return IFN_LDEXP;
2219 return IFN_LAST;
2220
2221 default:
2222 return IFN_LAST;
2223 }
2224}
2225
2226/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2227 on the current target by a call to an internal function, return the
2228 code of that internal function, otherwise return IFN_LAST. The caller
2229 is responsible for ensuring that any side-effects of the built-in
2230 call are dealt with correctly. E.g. if CALL sets errno, the caller
2231 must decide that the errno result isn't needed or make it available
2232 in some other way. */
2233
2234internal_fn
2235replacement_internal_fn (gcall *call)
2236{
2237 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2238 {
2239 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2240 if (ifn != IFN_LAST)
2241 {
2242 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2243 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2244 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2245 return ifn;
2246 }
2247 }
2248 return IFN_LAST;
2249}
2250
1b1562a5
MM
2251/* Expand a call to the builtin trinary math functions (fma).
2252 Return NULL_RTX if a normal call should be emitted rather than expanding the
2253 function in-line. EXP is the expression that is a call to the builtin
2254 function; if convenient, the result should be placed in TARGET.
2255 SUBTARGET may be used as the target for computing one of EXP's
2256 operands. */
2257
2258static rtx
2259expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2260{
2261 optab builtin_optab;
58f4cf2a
DM
2262 rtx op0, op1, op2, result;
2263 rtx_insn *insns;
1b1562a5
MM
2264 tree fndecl = get_callee_fndecl (exp);
2265 tree arg0, arg1, arg2;
ef4bddc2 2266 machine_mode mode;
1b1562a5
MM
2267
2268 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2269 return NULL_RTX;
2270
2271 arg0 = CALL_EXPR_ARG (exp, 0);
2272 arg1 = CALL_EXPR_ARG (exp, 1);
2273 arg2 = CALL_EXPR_ARG (exp, 2);
2274
2275 switch (DECL_FUNCTION_CODE (fndecl))
2276 {
2277 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 2278 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
2279 builtin_optab = fma_optab; break;
2280 default:
2281 gcc_unreachable ();
2282 }
2283
2284 /* Make a suitable register to place result in. */
2285 mode = TYPE_MODE (TREE_TYPE (exp));
2286
2287 /* Before working hard, check whether the instruction is available. */
2288 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2289 return NULL_RTX;
2290
04b80dbb 2291 result = gen_reg_rtx (mode);
1b1562a5
MM
2292
2293 /* Always stabilize the argument list. */
2294 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2295 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2296 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2297
2298 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2299 op1 = expand_normal (arg1);
2300 op2 = expand_normal (arg2);
2301
2302 start_sequence ();
2303
04b80dbb
RS
2304 /* Compute into RESULT.
2305 Set RESULT to wherever the result comes back. */
2306 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2307 result, 0);
1b1562a5
MM
2308
2309 /* If we were unable to expand via the builtin, stop the sequence
2310 (without outputting the insns) and call to the library function
2311 with the stabilized argument list. */
04b80dbb 2312 if (result == 0)
1b1562a5
MM
2313 {
2314 end_sequence ();
2315 return expand_call (exp, target, target == const0_rtx);
2316 }
2317
2318 /* Output the entire sequence. */
2319 insns = get_insns ();
2320 end_sequence ();
2321 emit_insn (insns);
2322
04b80dbb 2323 return result;
1b1562a5
MM
2324}
2325
6c7cf1f0 2326/* Expand a call to the builtin sin and cos math functions.
5039610b 2327 Return NULL_RTX if a normal call should be emitted rather than expanding the
6c7cf1f0
UB
2328 function in-line. EXP is the expression that is a call to the builtin
2329 function; if convenient, the result should be placed in TARGET.
2330 SUBTARGET may be used as the target for computing one of EXP's
2331 operands. */
2332
2333static rtx
2334expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2335{
2336 optab builtin_optab;
58f4cf2a
DM
2337 rtx op0;
2338 rtx_insn *insns;
6c7cf1f0 2339 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 2340 machine_mode mode;
5799f732 2341 tree arg;
6c7cf1f0 2342
5039610b
SL
2343 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2344 return NULL_RTX;
6c7cf1f0 2345
5039610b 2346 arg = CALL_EXPR_ARG (exp, 0);
6c7cf1f0
UB
2347
2348 switch (DECL_FUNCTION_CODE (fndecl))
2349 {
ea6a6627
VR
2350 CASE_FLT_FN (BUILT_IN_SIN):
2351 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2352 builtin_optab = sincos_optab; break;
2353 default:
298e6adc 2354 gcc_unreachable ();
6c7cf1f0
UB
2355 }
2356
2357 /* Make a suitable register to place result in. */
2358 mode = TYPE_MODE (TREE_TYPE (exp));
2359
6c7cf1f0 2360 /* Check if sincos insn is available, otherwise fallback
9cf737f8 2361 to sin or cos insn. */
947131ba 2362 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6c7cf1f0
UB
2363 switch (DECL_FUNCTION_CODE (fndecl))
2364 {
ea6a6627 2365 CASE_FLT_FN (BUILT_IN_SIN):
6c7cf1f0 2366 builtin_optab = sin_optab; break;
ea6a6627 2367 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2368 builtin_optab = cos_optab; break;
2369 default:
298e6adc 2370 gcc_unreachable ();
6c7cf1f0 2371 }
6c7cf1f0
UB
2372
2373 /* Before working hard, check whether the instruction is available. */
947131ba 2374 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6c7cf1f0 2375 {
04b80dbb 2376 rtx result = gen_reg_rtx (mode);
6c7cf1f0
UB
2377
2378 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2379 need to expand the argument again. This way, we will not perform
2380 side-effects more the once. */
5799f732 2381 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6c7cf1f0 2382
49452c07 2383 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6c7cf1f0 2384
6c7cf1f0
UB
2385 start_sequence ();
2386
04b80dbb
RS
2387 /* Compute into RESULT.
2388 Set RESULT to wherever the result comes back. */
6c7cf1f0
UB
2389 if (builtin_optab == sincos_optab)
2390 {
04b80dbb 2391 int ok;
5906d013 2392
6c7cf1f0
UB
2393 switch (DECL_FUNCTION_CODE (fndecl))
2394 {
ea6a6627 2395 CASE_FLT_FN (BUILT_IN_SIN):
04b80dbb 2396 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6c7cf1f0 2397 break;
ea6a6627 2398 CASE_FLT_FN (BUILT_IN_COS):
04b80dbb 2399 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6c7cf1f0
UB
2400 break;
2401 default:
298e6adc 2402 gcc_unreachable ();
6c7cf1f0 2403 }
04b80dbb 2404 gcc_assert (ok);
6c7cf1f0
UB
2405 }
2406 else
04b80dbb 2407 result = expand_unop (mode, builtin_optab, op0, result, 0);
6c7cf1f0 2408
04b80dbb 2409 if (result != 0)
6c7cf1f0 2410 {
6c7cf1f0
UB
2411 /* Output the entire sequence. */
2412 insns = get_insns ();
2413 end_sequence ();
2414 emit_insn (insns);
04b80dbb 2415 return result;
6c7cf1f0
UB
2416 }
2417
2418 /* If we were unable to expand via the builtin, stop the sequence
2419 (without outputting the insns) and call to the library function
2420 with the stabilized argument list. */
2421 end_sequence ();
2422 }
2423
04b80dbb 2424 return expand_call (exp, target, target == const0_rtx);
6c7cf1f0
UB
2425}
2426
44e10129
MM
2427/* Given an interclass math builtin decl FNDECL and it's argument ARG
2428 return an RTL instruction code that implements the functionality.
2429 If that isn't possible or available return CODE_FOR_nothing. */
eaee4464 2430
44e10129
MM
2431static enum insn_code
2432interclass_mathfn_icode (tree arg, tree fndecl)
eaee4464 2433{
44e10129 2434 bool errno_set = false;
2225b9f2 2435 optab builtin_optab = unknown_optab;
ef4bddc2 2436 machine_mode mode;
eaee4464
UB
2437
2438 switch (DECL_FUNCTION_CODE (fndecl))
2439 {
2440 CASE_FLT_FN (BUILT_IN_ILOGB):
903c723b
TC
2441 errno_set = true; builtin_optab = ilogb_optab; break;
2442 CASE_FLT_FN (BUILT_IN_ISINF):
2443 builtin_optab = isinf_optab; break;
2444 case BUILT_IN_ISNORMAL:
2445 case BUILT_IN_ISFINITE:
2446 CASE_FLT_FN (BUILT_IN_FINITE):
2447 case BUILT_IN_FINITED32:
2448 case BUILT_IN_FINITED64:
2449 case BUILT_IN_FINITED128:
2450 case BUILT_IN_ISINFD32:
2451 case BUILT_IN_ISINFD64:
2452 case BUILT_IN_ISINFD128:
2453 /* These builtins have no optabs (yet). */
0c8d3c2b 2454 break;
eaee4464
UB
2455 default:
2456 gcc_unreachable ();
2457 }
2458
2459 /* There's no easy way to detect the case we need to set EDOM. */
2460 if (flag_errno_math && errno_set)
44e10129 2461 return CODE_FOR_nothing;
eaee4464
UB
2462
2463 /* Optab mode depends on the mode of the input argument. */
2464 mode = TYPE_MODE (TREE_TYPE (arg));
2465
0c8d3c2b 2466 if (builtin_optab)
947131ba 2467 return optab_handler (builtin_optab, mode);
44e10129
MM
2468 return CODE_FOR_nothing;
2469}
2470
2471/* Expand a call to one of the builtin math functions that operate on
903c723b
TC
2472 floating point argument and output an integer result (ilogb, isinf,
2473 isnan, etc).
44e10129
MM
2474 Return 0 if a normal call should be emitted rather than expanding the
2475 function in-line. EXP is the expression that is a call to the builtin
4359dc2a 2476 function; if convenient, the result should be placed in TARGET. */
44e10129
MM
2477
2478static rtx
4359dc2a 2479expand_builtin_interclass_mathfn (tree exp, rtx target)
44e10129
MM
2480{
2481 enum insn_code icode = CODE_FOR_nothing;
2482 rtx op0;
2483 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 2484 machine_mode mode;
44e10129
MM
2485 tree arg;
2486
2487 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2488 return NULL_RTX;
2489
2490 arg = CALL_EXPR_ARG (exp, 0);
2491 icode = interclass_mathfn_icode (arg, fndecl);
2492 mode = TYPE_MODE (TREE_TYPE (arg));
2493
eaee4464
UB
2494 if (icode != CODE_FOR_nothing)
2495 {
99b1c316 2496 class expand_operand ops[1];
58f4cf2a 2497 rtx_insn *last = get_last_insn ();
8a0b1aa4 2498 tree orig_arg = arg;
eaee4464
UB
2499
2500 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2501 need to expand the argument again. This way, we will not perform
2502 side-effects more the once. */
5799f732 2503 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
eaee4464 2504
4359dc2a 2505 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
eaee4464
UB
2506
2507 if (mode != GET_MODE (op0))
2508 op0 = convert_to_mode (mode, op0, 0);
2509
a5c7d693
RS
2510 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2511 if (maybe_legitimize_operands (icode, 0, 1, ops)
2512 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2513 return ops[0].value;
2514
8a0b1aa4
MM
2515 delete_insns_since (last);
2516 CALL_EXPR_ARG (exp, 0) = orig_arg;
eaee4464
UB
2517 }
2518
44e10129 2519 return NULL_RTX;
eaee4464
UB
2520}
2521
403e54f0 2522/* Expand a call to the builtin sincos math function.
5039610b 2523 Return NULL_RTX if a normal call should be emitted rather than expanding the
403e54f0
RG
2524 function in-line. EXP is the expression that is a call to the builtin
2525 function. */
2526
2527static rtx
2528expand_builtin_sincos (tree exp)
2529{
2530 rtx op0, op1, op2, target1, target2;
ef4bddc2 2531 machine_mode mode;
403e54f0
RG
2532 tree arg, sinp, cosp;
2533 int result;
db3927fb 2534 location_t loc = EXPR_LOCATION (exp);
ca818bd9 2535 tree alias_type, alias_off;
403e54f0 2536
5039610b
SL
2537 if (!validate_arglist (exp, REAL_TYPE,
2538 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2539 return NULL_RTX;
403e54f0 2540
5039610b
SL
2541 arg = CALL_EXPR_ARG (exp, 0);
2542 sinp = CALL_EXPR_ARG (exp, 1);
2543 cosp = CALL_EXPR_ARG (exp, 2);
403e54f0
RG
2544
2545 /* Make a suitable register to place result in. */
2546 mode = TYPE_MODE (TREE_TYPE (arg));
2547
2548 /* Check if sincos insn is available, otherwise emit the call. */
947131ba 2549 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
403e54f0
RG
2550 return NULL_RTX;
2551
2552 target1 = gen_reg_rtx (mode);
2553 target2 = gen_reg_rtx (mode);
2554
84217346 2555 op0 = expand_normal (arg);
ca818bd9
RG
2556 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2557 alias_off = build_int_cst (alias_type, 0);
2558 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2559 sinp, alias_off));
2560 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2561 cosp, alias_off));
403e54f0
RG
2562
2563 /* Compute into target1 and target2.
2564 Set TARGET to wherever the result comes back. */
2565 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2566 gcc_assert (result);
2567
2568 /* Move target1 and target2 to the memory locations indicated
2569 by op1 and op2. */
2570 emit_move_insn (op1, target1);
2571 emit_move_insn (op2, target2);
2572
2573 return const0_rtx;
2574}
2575
75c7c595
RG
2576/* Expand a call to the internal cexpi builtin to the sincos math function.
2577 EXP is the expression that is a call to the builtin function; if convenient,
4359dc2a 2578 the result should be placed in TARGET. */
75c7c595
RG
2579
2580static rtx
4359dc2a 2581expand_builtin_cexpi (tree exp, rtx target)
75c7c595
RG
2582{
2583 tree fndecl = get_callee_fndecl (exp);
75c7c595 2584 tree arg, type;
ef4bddc2 2585 machine_mode mode;
75c7c595 2586 rtx op0, op1, op2;
db3927fb 2587 location_t loc = EXPR_LOCATION (exp);
75c7c595 2588
5039610b
SL
2589 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2590 return NULL_RTX;
75c7c595 2591
5039610b 2592 arg = CALL_EXPR_ARG (exp, 0);
75c7c595
RG
2593 type = TREE_TYPE (arg);
2594 mode = TYPE_MODE (TREE_TYPE (arg));
2595
2596 /* Try expanding via a sincos optab, fall back to emitting a libcall
b54c5497
RG
2597 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2598 is only generated from sincos, cexp or if we have either of them. */
947131ba 2599 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
75c7c595
RG
2600 {
2601 op1 = gen_reg_rtx (mode);
2602 op2 = gen_reg_rtx (mode);
2603
4359dc2a 2604 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
75c7c595
RG
2605
2606 /* Compute into op1 and op2. */
2607 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2608 }
d33d9e47 2609 else if (targetm.libc_has_function (function_sincos))
75c7c595 2610 {
5039610b 2611 tree call, fn = NULL_TREE;
75c7c595
RG
2612 tree top1, top2;
2613 rtx op1a, op2a;
2614
2615 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
e79983f4 2616 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
75c7c595 2617 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
e79983f4 2618 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
75c7c595 2619 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
e79983f4 2620 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
5039610b
SL
2621 else
2622 gcc_unreachable ();
b8698a0f 2623
9474e8ab
MM
2624 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2625 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
18ae1560
UB
2626 op1a = copy_addr_to_reg (XEXP (op1, 0));
2627 op2a = copy_addr_to_reg (XEXP (op2, 0));
75c7c595
RG
2628 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2629 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2630
75c7c595
RG
2631 /* Make sure not to fold the sincos call again. */
2632 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5039610b
SL
2633 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2634 call, 3, arg, top1, top2));
75c7c595 2635 }
b54c5497
RG
2636 else
2637 {
9d972b2d 2638 tree call, fn = NULL_TREE, narg;
b54c5497
RG
2639 tree ctype = build_complex_type (type);
2640
9d972b2d 2641 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
e79983f4 2642 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
9d972b2d 2643 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
e79983f4 2644 fn = builtin_decl_explicit (BUILT_IN_CEXP);
9d972b2d 2645 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
e79983f4 2646 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
5039610b
SL
2647 else
2648 gcc_unreachable ();
34a24c11
RG
2649
2650 /* If we don't have a decl for cexp create one. This is the
2651 friendliest fallback if the user calls __builtin_cexpi
2652 without full target C99 function support. */
2653 if (fn == NULL_TREE)
2654 {
2655 tree fntype;
2656 const char *name = NULL;
2657
2658 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2659 name = "cexpf";
2660 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2661 name = "cexp";
2662 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2663 name = "cexpl";
2664
2665 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2666 fn = build_fn_decl (name, fntype);
2667 }
2668
db3927fb 2669 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
b54c5497
RG
2670 build_real (type, dconst0), arg);
2671
2672 /* Make sure not to fold the cexp call again. */
2673 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
b8698a0f 2674 return expand_expr (build_call_nary (ctype, call, 1, narg),
49452c07 2675 target, VOIDmode, EXPAND_NORMAL);
b54c5497 2676 }
75c7c595
RG
2677
2678 /* Now build the proper return type. */
2679 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2680 make_tree (TREE_TYPE (arg), op2),
2681 make_tree (TREE_TYPE (arg), op1)),
49452c07 2682 target, VOIDmode, EXPAND_NORMAL);
75c7c595
RG
2683}
2684
44e10129
MM
2685/* Conveniently construct a function call expression. FNDECL names the
2686 function to be called, N is the number of arguments, and the "..."
2687 parameters are the argument expressions. Unlike build_call_exr
2688 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2689
2690static tree
2691build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2692{
2693 va_list ap;
2694 tree fntype = TREE_TYPE (fndecl);
2695 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2696
2697 va_start (ap, n);
2698 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2699 va_end (ap);
2700 SET_EXPR_LOCATION (fn, loc);
2701 return fn;
2702}
44e10129 2703
0bfa1541
RG
2704/* Expand a call to one of the builtin rounding functions gcc defines
2705 as an extension (lfloor and lceil). As these are gcc extensions we
2706 do not need to worry about setting errno to EDOM.
d8b42d06
UB
2707 If expanding via optab fails, lower expression to (int)(floor(x)).
2708 EXP is the expression that is a call to the builtin function;
1856c8dc 2709 if convenient, the result should be placed in TARGET. */
d8b42d06
UB
2710
2711static rtx
1856c8dc 2712expand_builtin_int_roundingfn (tree exp, rtx target)
d8b42d06 2713{
c3a4177f 2714 convert_optab builtin_optab;
58f4cf2a
DM
2715 rtx op0, tmp;
2716 rtx_insn *insns;
d8b42d06 2717 tree fndecl = get_callee_fndecl (exp);
d8b42d06
UB
2718 enum built_in_function fallback_fn;
2719 tree fallback_fndecl;
ef4bddc2 2720 machine_mode mode;
968fc3b6 2721 tree arg;
d8b42d06 2722
5039610b 2723 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1c178a5a 2724 return NULL_RTX;
d8b42d06 2725
5039610b 2726 arg = CALL_EXPR_ARG (exp, 0);
d8b42d06
UB
2727
2728 switch (DECL_FUNCTION_CODE (fndecl))
2729 {
6c32ee74 2730 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
2731 CASE_FLT_FN (BUILT_IN_LCEIL):
2732 CASE_FLT_FN (BUILT_IN_LLCEIL):
f94b1661
UB
2733 builtin_optab = lceil_optab;
2734 fallback_fn = BUILT_IN_CEIL;
2735 break;
2736
6c32ee74 2737 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627
VR
2738 CASE_FLT_FN (BUILT_IN_LFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LLFLOOR):
d8b42d06
UB
2740 builtin_optab = lfloor_optab;
2741 fallback_fn = BUILT_IN_FLOOR;
2742 break;
2743
2744 default:
2745 gcc_unreachable ();
2746 }
2747
2748 /* Make a suitable register to place result in. */
2749 mode = TYPE_MODE (TREE_TYPE (exp));
2750
c3a4177f 2751 target = gen_reg_rtx (mode);
d8b42d06 2752
c3a4177f
RG
2753 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2754 need to expand the argument again. This way, we will not perform
2755 side-effects more the once. */
5799f732 2756 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
d8b42d06 2757
1856c8dc 2758 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
d8b42d06 2759
c3a4177f 2760 start_sequence ();
d8b42d06 2761
c3a4177f
RG
2762 /* Compute into TARGET. */
2763 if (expand_sfix_optab (target, op0, builtin_optab))
2764 {
2765 /* Output the entire sequence. */
2766 insns = get_insns ();
d8b42d06 2767 end_sequence ();
c3a4177f
RG
2768 emit_insn (insns);
2769 return target;
d8b42d06
UB
2770 }
2771
c3a4177f
RG
2772 /* If we were unable to expand via the builtin, stop the sequence
2773 (without outputting the insns). */
2774 end_sequence ();
2775
d8b42d06
UB
2776 /* Fall back to floating point rounding optab. */
2777 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
34a24c11
RG
2778
2779 /* For non-C99 targets we may end up without a fallback fndecl here
2780 if the user called __builtin_lfloor directly. In this case emit
2781 a call to the floor/ceil variants nevertheless. This should result
2782 in the best user experience for not full C99 targets. */
2783 if (fallback_fndecl == NULL_TREE)
2784 {
2785 tree fntype;
2786 const char *name = NULL;
2787
2788 switch (DECL_FUNCTION_CODE (fndecl))
2789 {
6c32ee74 2790 case BUILT_IN_ICEIL:
34a24c11
RG
2791 case BUILT_IN_LCEIL:
2792 case BUILT_IN_LLCEIL:
2793 name = "ceil";
2794 break;
6c32ee74 2795 case BUILT_IN_ICEILF:
34a24c11
RG
2796 case BUILT_IN_LCEILF:
2797 case BUILT_IN_LLCEILF:
2798 name = "ceilf";
2799 break;
6c32ee74 2800 case BUILT_IN_ICEILL:
34a24c11
RG
2801 case BUILT_IN_LCEILL:
2802 case BUILT_IN_LLCEILL:
2803 name = "ceill";
2804 break;
6c32ee74 2805 case BUILT_IN_IFLOOR:
34a24c11
RG
2806 case BUILT_IN_LFLOOR:
2807 case BUILT_IN_LLFLOOR:
2808 name = "floor";
2809 break;
6c32ee74 2810 case BUILT_IN_IFLOORF:
34a24c11
RG
2811 case BUILT_IN_LFLOORF:
2812 case BUILT_IN_LLFLOORF:
2813 name = "floorf";
2814 break;
6c32ee74 2815 case BUILT_IN_IFLOORL:
34a24c11
RG
2816 case BUILT_IN_LFLOORL:
2817 case BUILT_IN_LLFLOORL:
2818 name = "floorl";
2819 break;
2820 default:
2821 gcc_unreachable ();
2822 }
2823
2824 fntype = build_function_type_list (TREE_TYPE (arg),
2825 TREE_TYPE (arg), NULL_TREE);
2826 fallback_fndecl = build_fn_decl (name, fntype);
2827 }
2828
aa493694 2829 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
d8b42d06 2830
39b1ec97 2831 tmp = expand_normal (exp);
9a002da8 2832 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
d8b42d06
UB
2833
2834 /* Truncate the result of floating point optab to integer
2835 via expand_fix (). */
2836 target = gen_reg_rtx (mode);
2837 expand_fix (target, tmp, 0);
2838
2839 return target;
2840}
2841
0bfa1541
RG
2842/* Expand a call to one of the builtin math functions doing integer
2843 conversion (lrint).
2844 Return 0 if a normal call should be emitted rather than expanding the
2845 function in-line. EXP is the expression that is a call to the builtin
1856c8dc 2846 function; if convenient, the result should be placed in TARGET. */
0bfa1541
RG
2847
2848static rtx
1856c8dc 2849expand_builtin_int_roundingfn_2 (tree exp, rtx target)
0bfa1541 2850{
bb7f0423 2851 convert_optab builtin_optab;
58f4cf2a
DM
2852 rtx op0;
2853 rtx_insn *insns;
0bfa1541 2854 tree fndecl = get_callee_fndecl (exp);
968fc3b6 2855 tree arg;
ef4bddc2 2856 machine_mode mode;
ff63ac4d 2857 enum built_in_function fallback_fn = BUILT_IN_NONE;
0bfa1541 2858
5039610b 2859 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1c178a5a 2860 return NULL_RTX;
b8698a0f 2861
5039610b 2862 arg = CALL_EXPR_ARG (exp, 0);
0bfa1541
RG
2863
2864 switch (DECL_FUNCTION_CODE (fndecl))
2865 {
6c32ee74 2866 CASE_FLT_FN (BUILT_IN_IRINT):
ff63ac4d 2867 fallback_fn = BUILT_IN_LRINT;
81fea426 2868 gcc_fallthrough ();
0bfa1541
RG
2869 CASE_FLT_FN (BUILT_IN_LRINT):
2870 CASE_FLT_FN (BUILT_IN_LLRINT):
ff63ac4d
JJ
2871 builtin_optab = lrint_optab;
2872 break;
6c32ee74
UB
2873
2874 CASE_FLT_FN (BUILT_IN_IROUND):
ff63ac4d 2875 fallback_fn = BUILT_IN_LROUND;
81fea426 2876 gcc_fallthrough ();
4d81bf84
RG
2877 CASE_FLT_FN (BUILT_IN_LROUND):
2878 CASE_FLT_FN (BUILT_IN_LLROUND):
ff63ac4d
JJ
2879 builtin_optab = lround_optab;
2880 break;
6c32ee74 2881
0bfa1541
RG
2882 default:
2883 gcc_unreachable ();
2884 }
2885
ff63ac4d
JJ
2886 /* There's no easy way to detect the case we need to set EDOM. */
2887 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2888 return NULL_RTX;
2889
0bfa1541
RG
2890 /* Make a suitable register to place result in. */
2891 mode = TYPE_MODE (TREE_TYPE (exp));
2892
ff63ac4d
JJ
2893 /* There's no easy way to detect the case we need to set EDOM. */
2894 if (!flag_errno_math)
2895 {
04b80dbb 2896 rtx result = gen_reg_rtx (mode);
0bfa1541 2897
ff63ac4d
JJ
2898 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2899 need to expand the argument again. This way, we will not perform
2900 side-effects more the once. */
2901 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
0bfa1541 2902
ff63ac4d 2903 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
0bfa1541 2904
ff63ac4d 2905 start_sequence ();
0bfa1541 2906
04b80dbb 2907 if (expand_sfix_optab (result, op0, builtin_optab))
ff63ac4d
JJ
2908 {
2909 /* Output the entire sequence. */
2910 insns = get_insns ();
2911 end_sequence ();
2912 emit_insn (insns);
04b80dbb 2913 return result;
ff63ac4d
JJ
2914 }
2915
2916 /* If we were unable to expand via the builtin, stop the sequence
2917 (without outputting the insns) and call to the library function
2918 with the stabilized argument list. */
0bfa1541
RG
2919 end_sequence ();
2920 }
2921
ff63ac4d
JJ
2922 if (fallback_fn != BUILT_IN_NONE)
2923 {
2924 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2925 targets, (int) round (x) should never be transformed into
2926 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2927 a call to lround in the hope that the target provides at least some
2928 C99 functions. This should result in the best user experience for
2929 not full C99 targets. */
b03ff92e
RS
2930 tree fallback_fndecl = mathfn_built_in_1
2931 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
ff63ac4d
JJ
2932
2933 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2934 fallback_fndecl, 1, arg);
2935
2936 target = expand_call (exp, NULL_RTX, target == const0_rtx);
9a002da8 2937 target = maybe_emit_group_store (target, TREE_TYPE (exp));
ff63ac4d
JJ
2938 return convert_to_mode (mode, target, 0);
2939 }
bb7f0423 2940
04b80dbb 2941 return expand_call (exp, target, target == const0_rtx);
0bfa1541
RG
2942}
2943
5039610b 2944/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
2945 a normal call should be emitted rather than expanding the function
2946 in-line. EXP is the expression that is a call to the builtin
2947 function; if convenient, the result should be placed in TARGET. */
2948
2949static rtx
4359dc2a 2950expand_builtin_powi (tree exp, rtx target)
17684d46 2951{
17684d46
RG
2952 tree arg0, arg1;
2953 rtx op0, op1;
ef4bddc2
RS
2954 machine_mode mode;
2955 machine_mode mode2;
17684d46 2956
5039610b
SL
2957 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
17684d46 2959
5039610b
SL
2960 arg0 = CALL_EXPR_ARG (exp, 0);
2961 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
2962 mode = TYPE_MODE (TREE_TYPE (exp));
2963
17684d46
RG
2964 /* Emit a libcall to libgcc. */
2965
5039610b 2966 /* Mode of the 2nd argument must match that of an int. */
f4b31647 2967 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 2968
17684d46
RG
2969 if (target == NULL_RTX)
2970 target = gen_reg_rtx (mode);
2971
4359dc2a 2972 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
2973 if (GET_MODE (op0) != mode)
2974 op0 = convert_to_mode (mode, op0, 0);
49452c07 2975 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
2976 if (GET_MODE (op1) != mode2)
2977 op1 = convert_to_mode (mode2, op1, 0);
17684d46 2978
8a33f100 2979 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 2980 target, LCT_CONST, mode,
0b8495ae 2981 op0, mode, op1, mode2);
17684d46
RG
2982
2983 return target;
2984}
2985
b8698a0f 2986/* Expand expression EXP which is a call to the strlen builtin. Return
781ff3d8 2987 NULL_RTX if we failed and the caller should emit a normal call, otherwise
0e9295cf 2988 try to get the result in TARGET, if convenient. */
3bdf5ad1 2989
28f4ec01 2990static rtx
5039610b 2991expand_builtin_strlen (tree exp, rtx target,
ef4bddc2 2992 machine_mode target_mode)
28f4ec01 2993{
5039610b
SL
2994 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2995 return NULL_RTX;
712b7a05 2996
99b1c316 2997 class expand_operand ops[4];
16155777
MS
2998 rtx pat;
2999 tree len;
3000 tree src = CALL_EXPR_ARG (exp, 0);
3001 rtx src_reg;
3002 rtx_insn *before_strlen;
3003 machine_mode insn_mode;
3004 enum insn_code icode = CODE_FOR_nothing;
3005 unsigned int align;
ae808627 3006
16155777
MS
3007 /* If the length can be computed at compile-time, return it. */
3008 len = c_strlen (src, 0);
3009 if (len)
3010 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3011
3012 /* If the length can be computed at compile-time and is constant
3013 integer, but there are side-effects in src, evaluate
3014 src for side-effects, then return len.
3015 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3016 can be optimized into: i++; x = 3; */
3017 len = c_strlen (src, 1);
3018 if (len && TREE_CODE (len) == INTEGER_CST)
3019 {
3020 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3021 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3022 }
28f4ec01 3023
16155777 3024 align = get_pointer_alignment (src) / BITS_PER_UNIT;
28f4ec01 3025
16155777
MS
3026 /* If SRC is not a pointer type, don't do this operation inline. */
3027 if (align == 0)
3028 return NULL_RTX;
3029
3030 /* Bail out if we can't compute strlen in the right mode. */
3031 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3032 {
3033 icode = optab_handler (strlen_optab, insn_mode);
3034 if (icode != CODE_FOR_nothing)
3035 break;
3036 }
3037 if (insn_mode == VOIDmode)
3038 return NULL_RTX;
28f4ec01 3039
16155777
MS
3040 /* Make a place to hold the source address. We will not expand
3041 the actual source until we are sure that the expansion will
3042 not fail -- there are trees that cannot be expanded twice. */
3043 src_reg = gen_reg_rtx (Pmode);
28f4ec01 3044
16155777
MS
3045 /* Mark the beginning of the strlen sequence so we can emit the
3046 source operand later. */
3047 before_strlen = get_last_insn ();
28f4ec01 3048
16155777
MS
3049 create_output_operand (&ops[0], target, insn_mode);
3050 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3051 create_integer_operand (&ops[2], 0);
3052 create_integer_operand (&ops[3], align);
3053 if (!maybe_expand_insn (icode, 4, ops))
3054 return NULL_RTX;
dd05e4fa 3055
16155777
MS
3056 /* Check to see if the argument was declared attribute nonstring
3057 and if so, issue a warning since at this point it's not known
3058 to be nul-terminated. */
3059 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6a33d0ff 3060
16155777
MS
3061 /* Now that we are assured of success, expand the source. */
3062 start_sequence ();
3063 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3064 if (pat != src_reg)
3065 {
fa465762 3066#ifdef POINTERS_EXTEND_UNSIGNED
16155777
MS
3067 if (GET_MODE (pat) != Pmode)
3068 pat = convert_to_mode (Pmode, pat,
3069 POINTERS_EXTEND_UNSIGNED);
fa465762 3070#endif
16155777
MS
3071 emit_move_insn (src_reg, pat);
3072 }
3073 pat = get_insns ();
3074 end_sequence ();
fca9f642 3075
16155777
MS
3076 if (before_strlen)
3077 emit_insn_after (pat, before_strlen);
3078 else
3079 emit_insn_before (pat, get_insns ());
28f4ec01 3080
16155777
MS
3081 /* Return the value in the proper mode for this function. */
3082 if (GET_MODE (ops[0].value) == target_mode)
3083 target = ops[0].value;
3084 else if (target != 0)
3085 convert_move (target, ops[0].value, 0);
3086 else
3087 target = convert_to_mode (target_mode, ops[0].value, 0);
dd05e4fa 3088
16155777 3089 return target;
28f4ec01
BS
3090}
3091
781ff3d8
MS
3092/* Expand call EXP to the strnlen built-in, returning the result
3093 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3094
3095static rtx
3096expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3097{
3098 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3099 return NULL_RTX;
3100
3101 tree src = CALL_EXPR_ARG (exp, 0);
3102 tree bound = CALL_EXPR_ARG (exp, 1);
3103
3104 if (!bound)
3105 return NULL_RTX;
3106
3107 location_t loc = UNKNOWN_LOCATION;
3108 if (EXPR_HAS_LOCATION (exp))
3109 loc = EXPR_LOCATION (exp);
3110
3111 tree maxobjsize = max_object_size ();
3112 tree func = get_callee_fndecl (exp);
3113
1583124e
MS
3114 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3115 so these conversions aren't necessary. */
e09aa5bd
MS
3116 c_strlen_data lendata = { };
3117 tree len = c_strlen (src, 0, &lendata, 1);
1583124e
MS
3118 if (len)
3119 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
781ff3d8
MS
3120
3121 if (TREE_CODE (bound) == INTEGER_CST)
3122 {
3123 if (!TREE_NO_WARNING (exp)
3124 && tree_int_cst_lt (maxobjsize, bound)
3125 && warning_at (loc, OPT_Wstringop_overflow_,
3126 "%K%qD specified bound %E "
3127 "exceeds maximum object size %E",
3128 exp, func, bound, maxobjsize))
1db01ff9 3129 TREE_NO_WARNING (exp) = true;
781ff3d8 3130
6c4aa5f6 3131 bool exact = true;
781ff3d8 3132 if (!len || TREE_CODE (len) != INTEGER_CST)
6c4aa5f6
MS
3133 {
3134 /* Clear EXACT if LEN may be less than SRC suggests,
3135 such as in
3136 strnlen (&a[i], sizeof a)
3137 where the value of i is unknown. Unless i's value is
3138 zero, the call is unsafe because the bound is greater. */
e09aa5bd
MS
3139 lendata.decl = unterminated_array (src, &len, &exact);
3140 if (!lendata.decl)
6c4aa5f6
MS
3141 return NULL_RTX;
3142 }
3143
e09aa5bd 3144 if (lendata.decl
6c4aa5f6
MS
3145 && !TREE_NO_WARNING (exp)
3146 && ((tree_int_cst_lt (len, bound))
3147 || !exact))
3148 {
3149 location_t warnloc
3150 = expansion_point_location_if_in_system_header (loc);
3151
3152 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3153 exact
3154 ? G_("%K%qD specified bound %E exceeds the size %E "
3155 "of unterminated array")
3156 : G_("%K%qD specified bound %E may exceed the size "
3157 "of at most %E of unterminated array"),
3158 exp, func, bound, len))
3159 {
e09aa5bd 3160 inform (DECL_SOURCE_LOCATION (lendata.decl),
6c4aa5f6
MS
3161 "referenced argument declared here");
3162 TREE_NO_WARNING (exp) = true;
3163 return NULL_RTX;
3164 }
3165 }
3166
3167 if (!len)
781ff3d8
MS
3168 return NULL_RTX;
3169
781ff3d8
MS
3170 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3171 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3172 }
3173
3174 if (TREE_CODE (bound) != SSA_NAME)
3175 return NULL_RTX;
3176
3177 wide_int min, max;
54994253 3178 enum value_range_kind rng = get_range_info (bound, &min, &max);
781ff3d8
MS
3179 if (rng != VR_RANGE)
3180 return NULL_RTX;
3181
3182 if (!TREE_NO_WARNING (exp)
1a9b15a7 3183 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
781ff3d8
MS
3184 && warning_at (loc, OPT_Wstringop_overflow_,
3185 "%K%qD specified bound [%wu, %wu] "
3186 "exceeds maximum object size %E",
3187 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
1db01ff9 3188 TREE_NO_WARNING (exp) = true;
781ff3d8 3189
f3431652 3190 bool exact = true;
781ff3d8 3191 if (!len || TREE_CODE (len) != INTEGER_CST)
f3431652 3192 {
e09aa5bd
MS
3193 lendata.decl = unterminated_array (src, &len, &exact);
3194 if (!lendata.decl)
f3431652
MS
3195 return NULL_RTX;
3196 }
781ff3d8 3197
e09aa5bd 3198 if (lendata.decl
f3431652
MS
3199 && !TREE_NO_WARNING (exp)
3200 && (wi::ltu_p (wi::to_wide (len), min)
3201 || !exact))
6c4aa5f6 3202 {
f3431652
MS
3203 location_t warnloc
3204 = expansion_point_location_if_in_system_header (loc);
3205
3206 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3207 exact
3208 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3209 "the size %E of unterminated array")
3210 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3211 "the size of at most %E of unterminated array"),
3212 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3213 {
e09aa5bd 3214 inform (DECL_SOURCE_LOCATION (lendata.decl),
f3431652
MS
3215 "referenced argument declared here");
3216 TREE_NO_WARNING (exp) = true;
3217 }
6c4aa5f6
MS
3218 }
3219
e09aa5bd 3220 if (lendata.decl)
f3431652
MS
3221 return NULL_RTX;
3222
781ff3d8
MS
3223 if (wi::gtu_p (min, wi::to_wide (len)))
3224 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3225
3226 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3227 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3228}
3229
57814e5e
JJ
3230/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3231 bytes from constant string DATA + OFFSET and return it as target
3232 constant. */
3233
3234static rtx
4682ae04 3235builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3236 scalar_int_mode mode)
57814e5e
JJ
3237{
3238 const char *str = (const char *) data;
3239
298e6adc
NS
3240 gcc_assert (offset >= 0
3241 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3242 <= strlen (str) + 1));
57814e5e
JJ
3243
3244 return c_readstr (str + offset, mode);
3245}
3246
3918b108 3247/* LEN specify length of the block of memcpy/memset operation.
82bb7d4e
JH
3248 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3249 In some cases we can make very likely guess on max size, then we
3250 set it into PROBABLE_MAX_SIZE. */
3918b108
JH
3251
3252static void
3253determine_block_size (tree len, rtx len_rtx,
3254 unsigned HOST_WIDE_INT *min_size,
82bb7d4e
JH
3255 unsigned HOST_WIDE_INT *max_size,
3256 unsigned HOST_WIDE_INT *probable_max_size)
3918b108
JH
3257{
3258 if (CONST_INT_P (len_rtx))
3259 {
2738b4c7 3260 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3918b108
JH
3261 return;
3262 }
3263 else
3264 {
807e902e 3265 wide_int min, max;
54994253 3266 enum value_range_kind range_type = VR_UNDEFINED;
82bb7d4e
JH
3267
3268 /* Determine bounds from the type. */
3269 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3270 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3271 else
3272 *min_size = 0;
3273 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2738b4c7
JJ
3274 *probable_max_size = *max_size
3275 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
82bb7d4e
JH
3276 else
3277 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3278
3279 if (TREE_CODE (len) == SSA_NAME)
3280 range_type = get_range_info (len, &min, &max);
3281 if (range_type == VR_RANGE)
3918b108 3282 {
807e902e 3283 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3918b108 3284 *min_size = min.to_uhwi ();
807e902e 3285 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
82bb7d4e 3286 *probable_max_size = *max_size = max.to_uhwi ();
3918b108 3287 }
82bb7d4e 3288 else if (range_type == VR_ANTI_RANGE)
3918b108 3289 {
70ec86ee 3290 /* Anti range 0...N lets us to determine minimal size to N+1. */
807e902e 3291 if (min == 0)
82bb7d4e 3292 {
807e902e
KZ
3293 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3294 *min_size = max.to_uhwi () + 1;
82bb7d4e
JH
3295 }
3296 /* Code like
3297
3298 int n;
3299 if (n < 100)
70ec86ee 3300 memcpy (a, b, n)
82bb7d4e
JH
3301
3302 Produce anti range allowing negative values of N. We still
3303 can use the information and make a guess that N is not negative.
3304 */
807e902e
KZ
3305 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3306 *probable_max_size = min.to_uhwi () - 1;
3918b108
JH
3307 }
3308 }
3309 gcc_checking_assert (*max_size <=
3310 (unsigned HOST_WIDE_INT)
3311 GET_MODE_MASK (GET_MODE (len_rtx)));
3312}
3313
ee92e7ba
MS
3314/* Try to verify that the sizes and lengths of the arguments to a string
3315 manipulation function given by EXP are within valid bounds and that
cc8bea0a
MS
3316 the operation does not lead to buffer overflow or read past the end.
3317 Arguments other than EXP may be null. When non-null, the arguments
3318 have the following meaning:
3319 DST is the destination of a copy call or NULL otherwise.
3320 SRC is the source of a copy call or NULL otherwise.
3321 DSTWRITE is the number of bytes written into the destination obtained
3322 from the user-supplied size argument to the function (such as in
3323 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3324 MAXREAD is the user-supplied bound on the length of the source sequence
ee92e7ba 3325 (such as in strncat(d, s, N). It specifies the upper limit on the number
cc8bea0a
MS
3326 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3327 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3328 expression EXP is a string function call (as opposed to a memory call
3329 like memcpy). As an exception, SRCSTR can also be an integer denoting
3330 the precomputed size of the source string or object (for functions like
3331 memcpy).
3332 DSTSIZE is the size of the destination object specified by the last
ee92e7ba 3333 argument to the _chk builtins, typically resulting from the expansion
cc8bea0a
MS
3334 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3335 DSTSIZE).
ee92e7ba 3336
cc8bea0a 3337 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
ee92e7ba
MS
3338 SIZE_MAX.
3339
cc8bea0a
MS
3340 If the call is successfully verified as safe return true, otherwise
3341 return false. */
ee92e7ba 3342
54aa6b58 3343bool
cc8bea0a
MS
3344check_access (tree exp, tree, tree, tree dstwrite,
3345 tree maxread, tree srcstr, tree dstsize)
ee92e7ba 3346{
cc8bea0a
MS
3347 int opt = OPT_Wstringop_overflow_;
3348
ee92e7ba 3349 /* The size of the largest object is half the address space, or
cc8bea0a
MS
3350 PTRDIFF_MAX. (This is way too permissive.) */
3351 tree maxobjsize = max_object_size ();
ee92e7ba 3352
cc8bea0a
MS
3353 /* Either the length of the source string for string functions or
3354 the size of the source object for raw memory functions. */
ee92e7ba
MS
3355 tree slen = NULL_TREE;
3356
d9c5a8b9
MS
3357 tree range[2] = { NULL_TREE, NULL_TREE };
3358
ee92e7ba
MS
3359 /* Set to true when the exact number of bytes written by a string
3360 function like strcpy is not known and the only thing that is
3361 known is that it must be at least one (for the terminating nul). */
3362 bool at_least_one = false;
cc8bea0a 3363 if (srcstr)
ee92e7ba 3364 {
cc8bea0a 3365 /* SRCSTR is normally a pointer to string but as a special case
ee92e7ba 3366 it can be an integer denoting the length of a string. */
cc8bea0a 3367 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
ee92e7ba
MS
3368 {
3369 /* Try to determine the range of lengths the source string
d9c5a8b9 3370 refers to. If it can be determined and is less than
cc8bea0a 3371 the upper bound given by MAXREAD add one to it for
ee92e7ba 3372 the terminating nul. Otherwise, set it to one for
cc8bea0a 3373 the same reason, or to MAXREAD as appropriate. */
5d6655eb
MS
3374 c_strlen_data lendata = { };
3375 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3376 range[0] = lendata.minlen;
a7160771 3377 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
cc8bea0a 3378 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
d9c5a8b9 3379 {
cc8bea0a
MS
3380 if (maxread && tree_int_cst_le (maxread, range[0]))
3381 range[0] = range[1] = maxread;
d9c5a8b9
MS
3382 else
3383 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3384 range[0], size_one_node);
3385
cc8bea0a
MS
3386 if (maxread && tree_int_cst_le (maxread, range[1]))
3387 range[1] = maxread;
d9c5a8b9
MS
3388 else if (!integer_all_onesp (range[1]))
3389 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3390 range[1], size_one_node);
3391
3392 slen = range[0];
3393 }
ee92e7ba
MS
3394 else
3395 {
3396 at_least_one = true;
3397 slen = size_one_node;
3398 }
3399 }
3400 else
cc8bea0a 3401 slen = srcstr;
ee92e7ba
MS
3402 }
3403
cc8bea0a 3404 if (!dstwrite && !maxread)
ee92e7ba
MS
3405 {
3406 /* When the only available piece of data is the object size
3407 there is nothing to do. */
3408 if (!slen)
3409 return true;
3410
3411 /* Otherwise, when the length of the source sequence is known
cc8bea0a 3412 (as with strlen), set DSTWRITE to it. */
d9c5a8b9 3413 if (!range[0])
cc8bea0a 3414 dstwrite = slen;
ee92e7ba
MS
3415 }
3416
cc8bea0a
MS
3417 if (!dstsize)
3418 dstsize = maxobjsize;
ee92e7ba 3419
cc8bea0a
MS
3420 if (dstwrite)
3421 get_size_range (dstwrite, range);
ee92e7ba 3422
cc8bea0a 3423 tree func = get_callee_fndecl (exp);
ee92e7ba
MS
3424
3425 /* First check the number of bytes to be written against the maximum
3426 object size. */
bfb9bd47
MS
3427 if (range[0]
3428 && TREE_CODE (range[0]) == INTEGER_CST
3429 && tree_int_cst_lt (maxobjsize, range[0]))
ee92e7ba 3430 {
781ff3d8
MS
3431 if (TREE_NO_WARNING (exp))
3432 return false;
3433
ee92e7ba 3434 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3435 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3436
781ff3d8 3437 bool warned;
ee92e7ba 3438 if (range[0] == range[1])
54aa6b58
MS
3439 warned = (func
3440 ? warning_at (loc, opt,
3441 "%K%qD specified size %E "
3442 "exceeds maximum object size %E",
3443 exp, func, range[0], maxobjsize)
3444 : warning_at (loc, opt,
3445 "%Kspecified size %E "
3446 "exceeds maximum object size %E",
3447 exp, range[0], maxobjsize));
781ff3d8 3448 else
54aa6b58
MS
3449 warned = (func
3450 ? warning_at (loc, opt,
3451 "%K%qD specified size between %E and %E "
3452 "exceeds maximum object size %E",
3453 exp, func,
3454 range[0], range[1], maxobjsize)
3455 : warning_at (loc, opt,
3456 "%Kspecified size between %E and %E "
3457 "exceeds maximum object size %E",
3458 exp, range[0], range[1], maxobjsize));
781ff3d8
MS
3459 if (warned)
3460 TREE_NO_WARNING (exp) = true;
3461
ee92e7ba
MS
3462 return false;
3463 }
3464
cc8bea0a
MS
3465 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3466 constant, and in range of unsigned HOST_WIDE_INT. */
3467 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3468
ee92e7ba
MS
3469 /* Next check the number of bytes to be written against the destination
3470 object size. */
cc8bea0a 3471 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
ee92e7ba
MS
3472 {
3473 if (range[0]
bfb9bd47 3474 && TREE_CODE (range[0]) == INTEGER_CST
cc8bea0a
MS
3475 && ((tree_fits_uhwi_p (dstsize)
3476 && tree_int_cst_lt (dstsize, range[0]))
bfb9bd47
MS
3477 || (dstwrite
3478 && tree_fits_uhwi_p (dstwrite)
cc8bea0a 3479 && tree_int_cst_lt (dstwrite, range[0]))))
ee92e7ba 3480 {
e0676e2e
MS
3481 if (TREE_NO_WARNING (exp))
3482 return false;
3483
ee92e7ba 3484 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3485 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3486
54aa6b58 3487 bool warned = false;
cc8bea0a 3488 if (dstwrite == slen && at_least_one)
d9c5a8b9
MS
3489 {
3490 /* This is a call to strcpy with a destination of 0 size
3491 and a source of unknown length. The call will write
3492 at least one byte past the end of the destination. */
54aa6b58
MS
3493 warned = (func
3494 ? warning_at (loc, opt,
3495 "%K%qD writing %E or more bytes into "
3496 "a region of size %E overflows "
3497 "the destination",
3498 exp, func, range[0], dstsize)
3499 : warning_at (loc, opt,
3500 "%Kwriting %E or more bytes into "
3501 "a region of size %E overflows "
3502 "the destination",
3503 exp, range[0], dstsize));
d9c5a8b9
MS
3504 }
3505 else if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3506 warned = (func
3507 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3508 "%K%qD writing %E byte into a region "
3509 "of size %E overflows the destination",
3510 "%K%qD writing %E bytes into a region "
3511 "of size %E overflows the destination",
3512 exp, func, range[0], dstsize)
3513 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3514 "%Kwriting %E byte into a region "
3515 "of size %E overflows the destination",
3516 "%Kwriting %E bytes into a region "
3517 "of size %E overflows the destination",
3518 exp, range[0], dstsize));
d9c5a8b9
MS
3519 else if (tree_int_cst_sign_bit (range[1]))
3520 {
3521 /* Avoid printing the upper bound if it's invalid. */
54aa6b58
MS
3522 warned = (func
3523 ? warning_at (loc, opt,
3524 "%K%qD writing %E or more bytes into "
3525 "a region of size %E overflows "
3526 "the destination",
3527 exp, func, range[0], dstsize)
3528 : warning_at (loc, opt,
3529 "%Kwriting %E or more bytes into "
3530 "a region of size %E overflows "
3531 "the destination",
3532 exp, range[0], dstsize));
d9c5a8b9 3533 }
ee92e7ba 3534 else
54aa6b58
MS
3535 warned = (func
3536 ? warning_at (loc, opt,
3537 "%K%qD writing between %E and %E bytes "
3538 "into a region of size %E overflows "
3539 "the destination",
3540 exp, func, range[0], range[1],
3541 dstsize)
3542 : warning_at (loc, opt,
3543 "%Kwriting between %E and %E bytes "
3544 "into a region of size %E overflows "
3545 "the destination",
3546 exp, range[0], range[1],
3547 dstsize));
3548 if (warned)
3549 TREE_NO_WARNING (exp) = true;
ee92e7ba
MS
3550
3551 /* Return error when an overflow has been detected. */
3552 return false;
3553 }
3554 }
3555
3556 /* Check the maximum length of the source sequence against the size
3557 of the destination object if known, or against the maximum size
3558 of an object. */
cc8bea0a 3559 if (maxread)
ee92e7ba 3560 {
cc8bea0a 3561 get_size_range (maxread, range);
cc8bea0a 3562 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
ee92e7ba
MS
3563 {
3564 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3565 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba
MS
3566
3567 if (tree_int_cst_lt (maxobjsize, range[0]))
3568 {
e0676e2e
MS
3569 if (TREE_NO_WARNING (exp))
3570 return false;
3571
54aa6b58
MS
3572 bool warned = false;
3573
ee92e7ba
MS
3574 /* Warn about crazy big sizes first since that's more
3575 likely to be meaningful than saying that the bound
3576 is greater than the object size if both are big. */
3577 if (range[0] == range[1])
54aa6b58
MS
3578 warned = (func
3579 ? warning_at (loc, opt,
3580 "%K%qD specified bound %E "
3581 "exceeds maximum object size %E",
3582 exp, func, range[0], maxobjsize)
3583 : warning_at (loc, opt,
3584 "%Kspecified bound %E "
3585 "exceeds maximum object size %E",
3586 exp, range[0], maxobjsize));
ee92e7ba 3587 else
54aa6b58
MS
3588 warned = (func
3589 ? warning_at (loc, opt,
3590 "%K%qD specified bound between "
3591 "%E and %E exceeds maximum object "
3592 "size %E",
3593 exp, func,
3594 range[0], range[1], maxobjsize)
3595 : warning_at (loc, opt,
3596 "%Kspecified bound between "
3597 "%E and %E exceeds maximum object "
3598 "size %E",
3599 exp, range[0], range[1], maxobjsize));
3600 if (warned)
3601 TREE_NO_WARNING (exp) = true;
ee92e7ba
MS
3602
3603 return false;
3604 }
3605
cc8bea0a 3606 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
ee92e7ba 3607 {
e0676e2e
MS
3608 if (TREE_NO_WARNING (exp))
3609 return false;
3610
54aa6b58
MS
3611 bool warned = false;
3612
d9c5a8b9 3613 if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3614 warned = (func
3615 ? warning_at (loc, opt,
3616 "%K%qD specified bound %E "
3617 "exceeds destination size %E",
3618 exp, func,
3619 range[0], dstsize)
3620 : warning_at (loc, opt,
3621 "%Kspecified bound %E "
3622 "exceeds destination size %E",
3623 exp, range[0], dstsize));
ee92e7ba 3624 else
54aa6b58
MS
3625 warned = (func
3626 ? warning_at (loc, opt,
3627 "%K%qD specified bound between %E "
3628 "and %E exceeds destination size %E",
3629 exp, func,
3630 range[0], range[1], dstsize)
3631 : warning_at (loc, opt,
3632 "%Kspecified bound between %E "
3633 "and %E exceeds destination size %E",
3634 exp,
3635 range[0], range[1], dstsize));
3636 if (warned)
3637 TREE_NO_WARNING (exp) = true;
3638
ee92e7ba
MS
3639 return false;
3640 }
3641 }
3642 }
3643
cc8bea0a 3644 /* Check for reading past the end of SRC. */
d9c5a8b9 3645 if (slen
cc8bea0a
MS
3646 && slen == srcstr
3647 && dstwrite && range[0]
d9c5a8b9
MS
3648 && tree_int_cst_lt (slen, range[0]))
3649 {
e0676e2e
MS
3650 if (TREE_NO_WARNING (exp))
3651 return false;
3652
54aa6b58 3653 bool warned = false;
d9c5a8b9 3654 location_t loc = tree_nonartificial_location (exp);
54aa6b58 3655 loc = expansion_point_location_if_in_system_header (loc);
d9c5a8b9
MS
3656
3657 if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3658 warned = (func
3659 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3660 "%K%qD reading %E byte from a region of size %E",
3661 "%K%qD reading %E bytes from a region of size %E",
3662 exp, func, range[0], slen)
3663 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3664 "%Kreading %E byte from a region of size %E",
3665 "%Kreading %E bytes from a region of size %E",
3666 exp, range[0], slen));
d9c5a8b9
MS
3667 else if (tree_int_cst_sign_bit (range[1]))
3668 {
3669 /* Avoid printing the upper bound if it's invalid. */
54aa6b58
MS
3670 warned = (func
3671 ? warning_at (loc, opt,
3672 "%K%qD reading %E or more bytes from a region "
3673 "of size %E",
3674 exp, func, range[0], slen)
3675 : warning_at (loc, opt,
3676 "%Kreading %E or more bytes from a region "
3677 "of size %E",
3678 exp, range[0], slen));
d9c5a8b9
MS
3679 }
3680 else
54aa6b58
MS
3681 warned = (func
3682 ? warning_at (loc, opt,
3683 "%K%qD reading between %E and %E bytes from "
3684 "a region of size %E",
3685 exp, func, range[0], range[1], slen)
3686 : warning_at (loc, opt,
3687 "%Kreading between %E and %E bytes from "
3688 "a region of size %E",
3689 exp, range[0], range[1], slen));
3690 if (warned)
3691 TREE_NO_WARNING (exp) = true;
3692
d9c5a8b9
MS
3693 return false;
3694 }
3695
ee92e7ba
MS
3696 return true;
3697}
3698
268209f3
MS
3699/* If STMT is a call to an allocation function, returns the size
3700 of the object allocated by the call. */
3701
3702tree
3703gimple_call_alloc_size (gimple *stmt)
3704{
3705 if (!stmt)
3706 return NULL_TREE;
3707
3708 tree allocfntype;
3709 if (tree fndecl = gimple_call_fndecl (stmt))
3710 allocfntype = TREE_TYPE (fndecl);
3711 else
3712 allocfntype = gimple_call_fntype (stmt);
3713
3714 if (!allocfntype)
3715 return NULL_TREE;
3716
3717 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3718 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3719 if (!at)
3720 {
3721 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3722 return NULL_TREE;
3723
3724 argidx1 = 0;
3725 }
3726
3727 unsigned nargs = gimple_call_num_args (stmt);
3728
3729 if (argidx1 == UINT_MAX)
3730 {
3731 tree atval = TREE_VALUE (at);
3732 if (!atval)
3733 return NULL_TREE;
3734
3735 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3736 if (nargs <= argidx1)
3737 return NULL_TREE;
3738
3739 atval = TREE_CHAIN (atval);
3740 if (atval)
3741 {
3742 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3743 if (nargs <= argidx2)
3744 return NULL_TREE;
3745 }
3746 }
3747
3748 tree size = gimple_call_arg (stmt, argidx1);
3749
3750 wide_int rng1[2];
3751 if (TREE_CODE (size) == INTEGER_CST)
3752 rng1[0] = rng1[1] = wi::to_wide (size);
3753 else if (TREE_CODE (size) != SSA_NAME
3754 || get_range_info (size, rng1, rng1 + 1) != VR_RANGE)
3755 return NULL_TREE;
3756
3757 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
a6ae300f 3758 return fold_convert (sizetype, size);
268209f3
MS
3759
3760 /* To handle ranges do the math in wide_int and return the product
3761 of the upper bounds as a constant. Ignore anti-ranges. */
3762 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3763 wide_int rng2[2];
3764 if (TREE_CODE (n) == INTEGER_CST)
3765 rng2[0] = rng2[1] = wi::to_wide (n);
3766 else if (TREE_CODE (n) != SSA_NAME
3767 || get_range_info (n, rng2, rng2 + 1) != VR_RANGE)
3768 return NULL_TREE;
3769
3770 /* Extend to the maximum precsion to avoid overflow. */
3771 const int prec = ADDR_MAX_PRECISION;
3772 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3773 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3774 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3775 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3776
3777 /* Return the lesser of SIZE_MAX and the product of the upper bounds. */
3778 rng1[0] = rng1[0] * rng2[0];
3779 rng1[1] = rng1[1] * rng2[1];
3780 tree size_max = TYPE_MAX_VALUE (sizetype);
3781 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3782 {
3783 rng1[1] = wi::to_wide (size_max);
3784 return size_max;
3785 }
3786
3787 return wide_int_to_tree (sizetype, rng1[1]);
3788}
3789
ee92e7ba 3790/* Helper to compute the size of the object referenced by the DEST
025d57f0 3791 expression which must have pointer type, using Object Size type
ee92e7ba 3792 OSTYPE (only the least significant 2 bits are used). Return
af3fa359
MS
3793 an estimate of the size of the object if successful or NULL when
3794 the size cannot be determined. When the referenced object involves
3795 a non-constant offset in some range the returned value represents
3796 the largest size given the smallest non-negative offset in the
464969eb 3797 range. If nonnull, set *PDECL to the decl of the referenced
268209f3
MS
3798 subobject if it can be determined, or to null otherwise. Likewise,
3799 when POFF is nonnull *POFF is set to the offset into *PDECL.
464969eb
MS
3800 The function is intended for diagnostics and should not be used
3801 to influence code generation or optimization. */
ee92e7ba 3802
025d57f0 3803tree
268209f3
MS
3804compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
3805 tree *poff /* = NULL */)
ee92e7ba 3806{
268209f3 3807 tree dummy_decl = NULL_TREE;
464969eb 3808 if (!pdecl)
268209f3
MS
3809 pdecl = &dummy_decl;
3810
3811 tree dummy_off = size_zero_node;
3812 if (!poff)
3813 poff = &dummy_off;
464969eb 3814
ee92e7ba 3815 unsigned HOST_WIDE_INT size;
025d57f0
MS
3816
3817 /* Only the two least significant bits are meaningful. */
3818 ostype &= 3;
3819
f7d86b5c 3820 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
ee92e7ba
MS
3821 return build_int_cst (sizetype, size);
3822
025d57f0
MS
3823 if (TREE_CODE (dest) == SSA_NAME)
3824 {
3825 gimple *stmt = SSA_NAME_DEF_STMT (dest);
268209f3
MS
3826 if (is_gimple_call (stmt))
3827 {
3828 /* If STMT is a call to an allocation function get the size
3829 from its argument(s). */
3830 return gimple_call_alloc_size (stmt);
3831 }
3832
025d57f0
MS
3833 if (!is_gimple_assign (stmt))
3834 return NULL_TREE;
3835
af3fa359
MS
3836 dest = gimple_assign_rhs1 (stmt);
3837
025d57f0 3838 tree_code code = gimple_assign_rhs_code (stmt);
af3fa359
MS
3839 if (code == POINTER_PLUS_EXPR)
3840 {
3841 /* compute_builtin_object_size fails for addresses with
3842 non-constant offsets. Try to determine the range of
e3329a78 3843 such an offset here and use it to adjust the constant
af3fa359
MS
3844 size. */
3845 tree off = gimple_assign_rhs2 (stmt);
e3329a78
MS
3846 if (TREE_CODE (off) == INTEGER_CST)
3847 {
268209f3 3848 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
e3329a78
MS
3849 {
3850 wide_int wioff = wi::to_wide (off);
3851 wide_int wisiz = wi::to_wide (size);
3852
3853 /* Ignore negative offsets for now. For others,
3854 use the lower bound as the most optimistic
3855 estimate of the (remaining) size. */
3856 if (wi::sign_mask (wioff))
3857 ;
3858 else if (wi::ltu_p (wioff, wisiz))
268209f3
MS
3859 {
3860 *poff = size_binop (PLUS_EXPR, *poff, off);
3861 return wide_int_to_tree (TREE_TYPE (size),
3862 wi::sub (wisiz, wioff));
3863 }
e3329a78 3864 else
268209f3
MS
3865 {
3866 *poff = size_binop (PLUS_EXPR, *poff, off);
3867 return size_zero_node;
3868 }
e3329a78
MS
3869 }
3870 }
3871 else if (TREE_CODE (off) == SSA_NAME
f05b3724 3872 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
af3fa359
MS
3873 {
3874 wide_int min, max;
54994253 3875 enum value_range_kind rng = get_range_info (off, &min, &max);
af3fa359
MS
3876
3877 if (rng == VR_RANGE)
3878 {
464969eb 3879 if (tree size = compute_objsize (dest, ostype, pdecl))
af3fa359
MS
3880 {
3881 wide_int wisiz = wi::to_wide (size);
3882
3883 /* Ignore negative offsets for now. For others,
3884 use the lower bound as the most optimistic
3885 estimate of the (remaining)size. */
a411ae9b
MS
3886 if (wi::sign_mask (min)
3887 || wi::sign_mask (max))
af3fa359
MS
3888 ;
3889 else if (wi::ltu_p (min, wisiz))
268209f3
MS
3890 {
3891 *poff = size_binop (PLUS_EXPR, *poff,
3892 wide_int_to_tree (sizetype, min));
3893 return wide_int_to_tree (TREE_TYPE (size),
3894 wi::sub (wisiz, min));
3895 }
af3fa359 3896 else
268209f3
MS
3897 {
3898 *poff = size_binop (PLUS_EXPR, *poff,
3899 wide_int_to_tree (sizetype, min));
3900 return size_zero_node;
3901 }
af3fa359
MS
3902 }
3903 }
3904 }
3905 }
3906 else if (code != ADDR_EXPR)
025d57f0 3907 return NULL_TREE;
025d57f0
MS
3908 }
3909
af3fa359
MS
3910 /* Unless computing the largest size (for memcpy and other raw memory
3911 functions), try to determine the size of the object from its type. */
3912 if (!ostype)
3913 return NULL_TREE;
3914
464969eb
MS
3915 if (TREE_CODE (dest) == ARRAY_REF
3916 || TREE_CODE (dest) == MEM_REF)
b631bdb3
MS
3917 {
3918 tree ref = TREE_OPERAND (dest, 0);
3919 tree off = TREE_OPERAND (dest, 1);
268209f3 3920 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
b631bdb3 3921 {
464969eb
MS
3922 /* If the declaration of the destination object is known
3923 to have zero size, return zero. */
268209f3
MS
3924 if (integer_zerop (size)
3925 && *pdecl && DECL_P (*pdecl)
3926 && *poff && integer_zerop (*poff))
f7d86b5c 3927 return size_zero_node;
464969eb 3928
268209f3
MS
3929 /* A valid offset into a declared object cannot be negative. */
3930 if (tree_int_cst_sgn (*poff) < 0)
3931 return size_zero_node;
464969eb 3932
268209f3
MS
3933 /* Adjust SIZE either up or down by the sum of *POFF and OFF
3934 above. */
464969eb
MS
3935 if (TREE_CODE (dest) == ARRAY_REF)
3936 {
268209f3 3937 /* Convert the array index into a byte offset. */
464969eb 3938 tree eltype = TREE_TYPE (dest);
f05b3724
JJ
3939 tree tpsize = TYPE_SIZE_UNIT (eltype);
3940 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
464969eb
MS
3941 off = fold_build2 (MULT_EXPR, size_type_node, off, tpsize);
3942 else
3943 return NULL_TREE;
3944 }
3945
268209f3
MS
3946 wide_int offrng[2];
3947 if (TREE_CODE (off) == INTEGER_CST)
3948 offrng[0] = offrng[1] = wi::to_wide (off);
3949 else if (TREE_CODE (off) == SSA_NAME)
3950 {
3951 wide_int min, max;
3952 enum value_range_kind rng
3953 = get_range_info (off, offrng, offrng + 1);
3954 if (rng != VR_RANGE)
3955 return NULL_TREE;
3956 }
3957 else
3958 return NULL_TREE;
3959
3960 /* Convert to the same precision to keep wide_int from "helpfuly"
3961 crashing whenever it sees other argumments. */
3962 offrng[0] = wide_int::from (offrng[0], ADDR_MAX_BITSIZE, SIGNED);
3963 offrng[1] = wide_int::from (offrng[1], ADDR_MAX_BITSIZE, SIGNED);
3964
3965 tree dstoff = *poff;
3966 if (integer_zerop (*poff))
3967 *poff = off;
3968 else if (!integer_zerop (off))
3969 {
3970 *poff = fold_convert (ptrdiff_type_node, *poff);
3971 off = fold_convert (ptrdiff_type_node, off);
3972 *poff = size_binop (PLUS_EXPR, *poff, off);
3973 }
3974
3975 if (wi::sign_mask (offrng[0]) >= 0)
3976 {
3977 if (TREE_CODE (size) != INTEGER_CST)
3978 return NULL_TREE;
3979
3980 /* Return the difference between the size and the offset
3981 or zero if the offset is greater. */
3982 wide_int wisize = wi::to_wide (size, ADDR_MAX_BITSIZE);
3983 if (wi::ltu_p (wisize, offrng[0]))
3984 return size_zero_node;
3985
3986 return wide_int_to_tree (sizetype, wisize - offrng[0]);
3987 }
3988
3989 wide_int dstoffrng[2];
3990 if (TREE_CODE (dstoff) == INTEGER_CST)
3991 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
3992 else if (TREE_CODE (dstoff) == SSA_NAME)
3993 {
3994 enum value_range_kind rng
3995 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
3996 if (rng != VR_RANGE)
3997 return NULL_TREE;
3998 }
3999 else
4000 return NULL_TREE;
4001
4002 dstoffrng[0] = wide_int::from (dstoffrng[0], ADDR_MAX_BITSIZE, SIGNED);
4003 dstoffrng[1] = wide_int::from (dstoffrng[1], ADDR_MAX_BITSIZE, SIGNED);
4004
4005 wide_int declsize = wi::to_wide (size);
4006 if (wi::sign_mask (dstoffrng[0]) > 0)
4007 declsize += dstoffrng[0];
4008
4009 offrng[1] += dstoffrng[1];
4010 if (wi::sign_mask (offrng[1]) < 0)
4011 return size_zero_node;
4012
4013 return wide_int_to_tree (sizetype, declsize);
b631bdb3
MS
4014 }
4015
4016 return NULL_TREE;
4017 }
4018
464969eb
MS
4019 if (TREE_CODE (dest) == COMPONENT_REF)
4020 {
4021 *pdecl = TREE_OPERAND (dest, 1);
6889a3ac 4022 return component_ref_size (dest);
464969eb
MS
4023 }
4024
025d57f0
MS
4025 if (TREE_CODE (dest) != ADDR_EXPR)
4026 return NULL_TREE;
4027
464969eb
MS
4028 tree ref = TREE_OPERAND (dest, 0);
4029 if (DECL_P (ref))
4030 {
4031 *pdecl = ref;
3e00ba47
RB
4032 if (tree size = DECL_SIZE_UNIT (ref))
4033 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
464969eb
MS
4034 }
4035
025d57f0
MS
4036 tree type = TREE_TYPE (dest);
4037 if (TREE_CODE (type) == POINTER_TYPE)
4038 type = TREE_TYPE (type);
4039
4040 type = TYPE_MAIN_VARIANT (type);
268209f3
MS
4041 if (TREE_CODE (dest) == ADDR_EXPR)
4042 dest = TREE_OPERAND (dest, 0);
025d57f0
MS
4043
4044 if (TREE_CODE (type) == ARRAY_TYPE
268209f3 4045 && !array_at_struct_end_p (dest))
464969eb
MS
4046 {
4047 if (tree size = TYPE_SIZE_UNIT (type))
4048 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
025d57f0
MS
4049 }
4050
ee92e7ba
MS
4051 return NULL_TREE;
4052}
4053
4054/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
4055 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
4056 call expression, DEST is the destination argument, SRC is the source
4057 argument or null, and LEN is the number of bytes. Use Object Size type-0
4058 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
4059 (no overflow or invalid sizes), false otherwise. */
4060
4061static bool
cc8bea0a 4062check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 4063{
ee92e7ba 4064 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
4065 try to determine the size of the largest source and destination
4066 object using type-0 Object Size regardless of the object size
4067 type specified by the option. */
4068 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4069 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 4070
cc8bea0a
MS
4071 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4072 srcsize, dstsize);
d9c5a8b9
MS
4073}
4074
4075/* Validate memchr arguments without performing any expansion.
4076 Return NULL_RTX. */
4077
4078static rtx
4079expand_builtin_memchr (tree exp, rtx)
4080{
4081 if (!validate_arglist (exp,
4082 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4083 return NULL_RTX;
4084
4085 tree arg1 = CALL_EXPR_ARG (exp, 0);
4086 tree len = CALL_EXPR_ARG (exp, 2);
4087
4088 /* Diagnose calls where the specified length exceeds the size
4089 of the object. */
4090 if (warn_stringop_overflow)
4091 {
4092 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
4093 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4094 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
4095 }
4096
4097 return NULL_RTX;
ee92e7ba
MS
4098}
4099
5039610b
SL
4100/* Expand a call EXP to the memcpy builtin.
4101 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 4102 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 4103 mode MODE if that's convenient). */
5039610b 4104
28f4ec01 4105static rtx
44e10129 4106expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 4107{
5039610b
SL
4108 if (!validate_arglist (exp,
4109 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4110 return NULL_RTX;
ee92e7ba
MS
4111
4112 tree dest = CALL_EXPR_ARG (exp, 0);
4113 tree src = CALL_EXPR_ARG (exp, 1);
4114 tree len = CALL_EXPR_ARG (exp, 2);
4115
cc8bea0a 4116 check_memop_access (exp, dest, src, len);
ee92e7ba 4117
671a00ee 4118 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
03a9b90a 4119 /*retmode=*/ RETURN_BEGIN, false);
edcf72f3 4120}
57814e5e 4121
e50d56a5
MS
4122/* Check a call EXP to the memmove built-in for validity.
4123 Return NULL_RTX on both success and failure. */
4124
4125static rtx
03a9b90a 4126expand_builtin_memmove (tree exp, rtx target)
e50d56a5
MS
4127{
4128 if (!validate_arglist (exp,
4129 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4130 return NULL_RTX;
4131
4132 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 4133 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
4134 tree len = CALL_EXPR_ARG (exp, 2);
4135
cc8bea0a 4136 check_memop_access (exp, dest, src, len);
e50d56a5 4137
03a9b90a
AS
4138 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4139 /*retmode=*/ RETURN_BEGIN, true);
e50d56a5
MS
4140}
4141
5039610b
SL
4142/* Expand a call EXP to the mempcpy builtin.
4143 Return NULL_RTX if we failed; the caller should emit a normal call,
e3e9f108 4144 otherwise try to get the result in TARGET, if convenient (and in
2ff5ffb6 4145 mode MODE if that's convenient). */
e3e9f108
JJ
4146
4147static rtx
671a00ee 4148expand_builtin_mempcpy (tree exp, rtx target)
e3e9f108 4149{
5039610b
SL
4150 if (!validate_arglist (exp,
4151 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4152 return NULL_RTX;
ee92e7ba
MS
4153
4154 tree dest = CALL_EXPR_ARG (exp, 0);
4155 tree src = CALL_EXPR_ARG (exp, 1);
4156 tree len = CALL_EXPR_ARG (exp, 2);
4157
af3fa359
MS
4158 /* Policy does not generally allow using compute_objsize (which
4159 is used internally by check_memop_size) to change code generation
4160 or drive optimization decisions.
4161
4162 In this instance it is safe because the code we generate has
4163 the same semantics regardless of the return value of
4164 check_memop_sizes. Exactly the same amount of data is copied
4165 and the return value is exactly the same in both cases.
4166
4167 Furthermore, check_memop_size always uses mode 0 for the call to
4168 compute_objsize, so the imprecise nature of compute_objsize is
4169 avoided. */
4170
ee92e7ba
MS
4171 /* Avoid expanding mempcpy into memcpy when the call is determined
4172 to overflow the buffer. This also prevents the same overflow
4173 from being diagnosed again when expanding memcpy. */
cc8bea0a 4174 if (!check_memop_access (exp, dest, src, len))
ee92e7ba
MS
4175 return NULL_RTX;
4176
4177 return expand_builtin_mempcpy_args (dest, src, len,
2ff5ffb6 4178 target, exp, /*retmode=*/ RETURN_END);
edcf72f3
IE
4179}
4180
671a00ee
ML
4181/* Helper function to do the actual work for expand of memory copy family
4182 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
2ff5ffb6
ML
4183 of memory from SRC to DEST and assign to TARGET if convenient. Return
4184 value is based on RETMODE argument. */
5039610b
SL
4185
4186static rtx
671a00ee 4187expand_builtin_memory_copy_args (tree dest, tree src, tree len,
03a9b90a
AS
4188 rtx target, tree exp, memop_ret retmode,
4189 bool might_overlap)
5039610b 4190{
671a00ee
ML
4191 const char *src_str;
4192 unsigned int src_align = get_pointer_alignment (src);
4193 unsigned int dest_align = get_pointer_alignment (dest);
4194 rtx dest_mem, src_mem, dest_addr, len_rtx;
4195 HOST_WIDE_INT expected_size = -1;
4196 unsigned int expected_align = 0;
4197 unsigned HOST_WIDE_INT min_size;
4198 unsigned HOST_WIDE_INT max_size;
4199 unsigned HOST_WIDE_INT probable_max_size;
edcf72f3 4200
db91c7cf
ML
4201 bool is_move_done;
4202
671a00ee
ML
4203 /* If DEST is not a pointer type, call the normal function. */
4204 if (dest_align == 0)
4205 return NULL_RTX;
c22cacf3 4206
671a00ee
ML
4207 /* If either SRC is not a pointer type, don't do this
4208 operation in-line. */
4209 if (src_align == 0)
4210 return NULL_RTX;
8fd3cf4e 4211
671a00ee
ML
4212 if (currently_expanding_gimple_stmt)
4213 stringop_block_profile (currently_expanding_gimple_stmt,
4214 &expected_align, &expected_size);
33521f7d 4215
671a00ee
ML
4216 if (expected_align < dest_align)
4217 expected_align = dest_align;
4218 dest_mem = get_memory_rtx (dest, len);
4219 set_mem_align (dest_mem, dest_align);
4220 len_rtx = expand_normal (len);
4221 determine_block_size (len, len_rtx, &min_size, &max_size,
4222 &probable_max_size);
4223 src_str = c_getstr (src);
e3e9f108 4224
03a9b90a
AS
4225 /* If SRC is a string constant and block move would be done by
4226 pieces, we can avoid loading the string from memory and only
4227 stored the computed constants. This works in the overlap
4228 (memmove) case as well because store_by_pieces just generates a
4229 series of stores of constants from the string constant returned
4230 by c_getstr(). */
671a00ee
ML
4231 if (src_str
4232 && CONST_INT_P (len_rtx)
4233 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
4234 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4235 CONST_CAST (char *, src_str),
4236 dest_align, false))
4237 {
4238 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4239 builtin_memcpy_read_str,
7d3eecca 4240 CONST_CAST (char *, src_str),
2ff5ffb6 4241 dest_align, false, retmode);
671a00ee
ML
4242 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4243 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4244 return dest_mem;
4245 }
e3e9f108 4246
671a00ee
ML
4247 src_mem = get_memory_rtx (src, len);
4248 set_mem_align (src_mem, src_align);
8fd3cf4e 4249
671a00ee 4250 /* Copy word part most expediently. */
fdd33254 4251 enum block_op_methods method = BLOCK_OP_NORMAL;
2ff5ffb6
ML
4252 if (CALL_EXPR_TAILCALL (exp)
4253 && (retmode == RETURN_BEGIN || target == const0_rtx))
fdd33254 4254 method = BLOCK_OP_TAILCALL;
db91c7cf
ML
4255 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4256 && retmode == RETURN_END
03a9b90a 4257 && !might_overlap
db91c7cf
ML
4258 && target != const0_rtx);
4259 if (use_mempcpy_call)
fdd33254
ML
4260 method = BLOCK_OP_NO_LIBCALL_RET;
4261 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
671a00ee 4262 expected_align, expected_size,
db91c7cf 4263 min_size, max_size, probable_max_size,
03a9b90a 4264 use_mempcpy_call, &is_move_done, might_overlap);
db91c7cf
ML
4265
4266 /* Bail out when a mempcpy call would be expanded as libcall and when
4267 we have a target that provides a fast implementation
4268 of mempcpy routine. */
4269 if (!is_move_done)
4270 return NULL_RTX;
4271
fdd33254
ML
4272 if (dest_addr == pc_rtx)
4273 return NULL_RTX;
671a00ee
ML
4274
4275 if (dest_addr == 0)
4276 {
4277 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4278 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4279 }
4280
2ff5ffb6 4281 if (retmode != RETURN_BEGIN && target != const0_rtx)
671a00ee
ML
4282 {
4283 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4284 /* stpcpy pointer to last byte. */
2ff5ffb6 4285 if (retmode == RETURN_END_MINUS_ONE)
671a00ee 4286 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
7d3eecca 4287 }
671a00ee
ML
4288
4289 return dest_addr;
4290}
4291
4292static rtx
4293expand_builtin_mempcpy_args (tree dest, tree src, tree len,
2ff5ffb6 4294 rtx target, tree orig_exp, memop_ret retmode)
671a00ee
ML
4295{
4296 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
03a9b90a 4297 retmode, false);
e3e9f108
JJ
4298}
4299
5039610b 4300/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
beed8fc0 4301 we failed, the caller should emit a normal call, otherwise try to
2ff5ffb6
ML
4302 get the result in TARGET, if convenient.
4303 Return value is based on RETMODE argument. */
beed8fc0
AO
4304
4305static rtx
2ff5ffb6 4306expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
beed8fc0 4307{
99b1c316 4308 class expand_operand ops[3];
beed8fc0
AO
4309 rtx dest_mem;
4310 rtx src_mem;
beed8fc0 4311
7cff0471 4312 if (!targetm.have_movstr ())
5039610b 4313 return NULL_RTX;
beed8fc0 4314
435bb2a1
JJ
4315 dest_mem = get_memory_rtx (dest, NULL);
4316 src_mem = get_memory_rtx (src, NULL);
2831adb5 4317 if (retmode == RETURN_BEGIN)
beed8fc0
AO
4318 {
4319 target = force_reg (Pmode, XEXP (dest_mem, 0));
4320 dest_mem = replace_equiv_address (dest_mem, target);
beed8fc0
AO
4321 }
4322
42bdb8f2
ML
4323 create_output_operand (&ops[0],
4324 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
a5c7d693
RS
4325 create_fixed_operand (&ops[1], dest_mem);
4326 create_fixed_operand (&ops[2], src_mem);
7cff0471 4327 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
7c5425fa 4328 return NULL_RTX;
beed8fc0 4329
2ff5ffb6 4330 if (retmode != RETURN_BEGIN && target != const0_rtx)
7ce3fc8f 4331 {
a5c7d693
RS
4332 target = ops[0].value;
4333 /* movstr is supposed to set end to the address of the NUL
4334 terminator. If the caller requested a mempcpy-like return value,
4335 adjust it. */
2ff5ffb6 4336 if (retmode == RETURN_END)
a5c7d693 4337 {
0a81f074
RS
4338 rtx tem = plus_constant (GET_MODE (target),
4339 gen_lowpart (GET_MODE (target), target), 1);
a5c7d693
RS
4340 emit_move_insn (target, force_operand (tem, NULL_RTX));
4341 }
7ce3fc8f 4342 }
beed8fc0
AO
4343 return target;
4344}
4345
ee92e7ba
MS
4346/* Do some very basic size validation of a call to the strcpy builtin
4347 given by EXP. Return NULL_RTX to have the built-in expand to a call
4348 to the library function. */
4349
4350static rtx
b5338fb3 4351expand_builtin_strcat (tree exp)
ee92e7ba
MS
4352{
4353 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4354 || !warn_stringop_overflow)
4355 return NULL_RTX;
4356
4357 tree dest = CALL_EXPR_ARG (exp, 0);
4358 tree src = CALL_EXPR_ARG (exp, 1);
4359
b5338fb3
MS
4360 /* Detect unterminated source (only). */
4361 if (!check_nul_terminated_array (exp, src))
4362 return NULL_RTX;
4363
ee92e7ba
MS
4364 /* There is no way here to determine the length of the string in
4365 the destination to which the SRC string is being appended so
4366 just diagnose cases when the souce string is longer than
4367 the destination object. */
4368
d9c5a8b9 4369 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba 4370
cc8bea0a
MS
4371 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4372 destsize);
ee92e7ba
MS
4373
4374 return NULL_RTX;
4375}
4376
b8698a0f
L
4377/* Expand expression EXP, which is a call to the strcpy builtin. Return
4378 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 4379 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 4380 convenient). */
fed3cef0 4381
28f4ec01 4382static rtx
44e10129 4383expand_builtin_strcpy (tree exp, rtx target)
28f4ec01 4384{
ee92e7ba
MS
4385 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4386 return NULL_RTX;
4387
4388 tree dest = CALL_EXPR_ARG (exp, 0);
4389 tree src = CALL_EXPR_ARG (exp, 1);
4390
4391 if (warn_stringop_overflow)
4392 {
d9c5a8b9 4393 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
cc8bea0a
MS
4394 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4395 src, destsize);
ee92e7ba
MS
4396 }
4397
e08341bb 4398 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
36537a1c
MS
4399 {
4400 /* Check to see if the argument was declared attribute nonstring
4401 and if so, issue a warning since at this point it's not known
4402 to be nul-terminated. */
4403 tree fndecl = get_callee_fndecl (exp);
4404 maybe_warn_nonstring_arg (fndecl, exp);
4405 return ret;
4406 }
4407
4408 return NULL_RTX;
5039610b
SL
4409}
4410
4411/* Helper function to do the actual work for expand_builtin_strcpy. The
4412 arguments to the builtin_strcpy call DEST and SRC are broken out
4413 so that this can also be called without constructing an actual CALL_EXPR.
4414 The other arguments and return value are the same as for
4415 expand_builtin_strcpy. */
4416
4417static rtx
e08341bb 4418expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5039610b 4419{
e08341bb
MS
4420 /* Detect strcpy calls with unterminated arrays.. */
4421 if (tree nonstr = unterminated_array (src))
4422 {
4423 /* NONSTR refers to the non-nul terminated constant array. */
4424 if (!TREE_NO_WARNING (exp))
4425 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4426 return NULL_RTX;
4427 }
4428
2ff5ffb6 4429 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
28f4ec01
BS
4430}
4431
5039610b
SL
4432/* Expand a call EXP to the stpcpy builtin.
4433 Return NULL_RTX if we failed the caller should emit a normal call,
9cb65f92
KG
4434 otherwise try to get the result in TARGET, if convenient (and in
4435 mode MODE if that's convenient). */
4436
4437static rtx
3ce4cdb2 4438expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
9cb65f92 4439{
5039610b 4440 tree dst, src;
db3927fb 4441 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
4442
4443 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4444 return NULL_RTX;
4445
4446 dst = CALL_EXPR_ARG (exp, 0);
4447 src = CALL_EXPR_ARG (exp, 1);
4448
e50d56a5
MS
4449 if (warn_stringop_overflow)
4450 {
d9c5a8b9 4451 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
cc8bea0a
MS
4452 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4453 src, destsize);
e50d56a5
MS
4454 }
4455
beed8fc0 4456 /* If return value is ignored, transform stpcpy into strcpy. */
e79983f4 4457 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
ad4319ec 4458 {
e79983f4 4459 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
aa493694 4460 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
0d2a6e08 4461 return expand_expr (result, target, mode, EXPAND_NORMAL);
ad4319ec 4462 }
9cb65f92
KG
4463 else
4464 {
5039610b 4465 tree len, lenp1;
beed8fc0 4466 rtx ret;
e3e9f108 4467
8fd3cf4e 4468 /* Ensure we get an actual string whose length can be evaluated at
c22cacf3
MS
4469 compile-time, not an expression containing a string. This is
4470 because the latter will potentially produce pessimized code
4471 when used to produce the return value. */
e09aa5bd 4472 c_strlen_data lendata = { };
01b0acb7 4473 if (!c_getstr (src, NULL)
e09aa5bd 4474 || !(len = c_strlen (src, 0, &lendata, 1)))
2ff5ffb6
ML
4475 return expand_movstr (dst, src, target,
4476 /*retmode=*/ RETURN_END_MINUS_ONE);
9cb65f92 4477
e09aa5bd
MS
4478 if (lendata.decl && !TREE_NO_WARNING (exp))
4479 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
01b0acb7 4480
db3927fb 4481 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
44e10129 4482 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
2ff5ffb6
ML
4483 target, exp,
4484 /*retmode=*/ RETURN_END_MINUS_ONE);
beed8fc0
AO
4485
4486 if (ret)
4487 return ret;
4488
4489 if (TREE_CODE (len) == INTEGER_CST)
4490 {
84217346 4491 rtx len_rtx = expand_normal (len);
beed8fc0 4492
481683e1 4493 if (CONST_INT_P (len_rtx))
beed8fc0 4494 {
e08341bb 4495 ret = expand_builtin_strcpy_args (exp, dst, src, target);
beed8fc0
AO
4496
4497 if (ret)
4498 {
4499 if (! target)
58ec6ece
SE
4500 {
4501 if (mode != VOIDmode)
4502 target = gen_reg_rtx (mode);
4503 else
4504 target = gen_reg_rtx (GET_MODE (ret));
4505 }
beed8fc0
AO
4506 if (GET_MODE (target) != GET_MODE (ret))
4507 ret = gen_lowpart (GET_MODE (target), ret);
4508
0a81f074 4509 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
7ce3fc8f 4510 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
298e6adc 4511 gcc_assert (ret);
beed8fc0
AO
4512
4513 return target;
4514 }
4515 }
4516 }
4517
2ff5ffb6
ML
4518 return expand_movstr (dst, src, target,
4519 /*retmode=*/ RETURN_END_MINUS_ONE);
9cb65f92
KG
4520 }
4521}
4522
3ce4cdb2
MS
4523/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4524 arguments while being careful to avoid duplicate warnings (which could
4525 be issued if the expander were to expand the call, resulting in it
4526 being emitted in expand_call(). */
4527
4528static rtx
4529expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4530{
4531 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4532 {
4533 /* The call has been successfully expanded. Check for nonstring
4534 arguments and issue warnings as appropriate. */
4535 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4536 return ret;
4537 }
4538
4539 return NULL_RTX;
4540}
4541
e50d56a5
MS
4542/* Check a call EXP to the stpncpy built-in for validity.
4543 Return NULL_RTX on both success and failure. */
4544
4545static rtx
4546expand_builtin_stpncpy (tree exp, rtx)
4547{
4548 if (!validate_arglist (exp,
4549 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4550 || !warn_stringop_overflow)
4551 return NULL_RTX;
4552
c6c02519 4553 /* The source and destination of the call. */
e50d56a5
MS
4554 tree dest = CALL_EXPR_ARG (exp, 0);
4555 tree src = CALL_EXPR_ARG (exp, 1);
4556
c6c02519 4557 /* The exact number of bytes to write (not the maximum). */
e50d56a5 4558 tree len = CALL_EXPR_ARG (exp, 2);
b5338fb3
MS
4559 if (!check_nul_terminated_array (exp, src, len))
4560 return NULL_RTX;
e50d56a5 4561
c6c02519 4562 /* The size of the destination object. */
d9c5a8b9 4563 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e50d56a5 4564
cc8bea0a 4565 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
e50d56a5
MS
4566
4567 return NULL_RTX;
4568}
4569
57814e5e
JJ
4570/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4571 bytes from constant string DATA + OFFSET and return it as target
4572 constant. */
4573
14a43348 4574rtx
4682ae04 4575builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 4576 scalar_int_mode mode)
57814e5e
JJ
4577{
4578 const char *str = (const char *) data;
4579
4580 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4581 return const0_rtx;
4582
4583 return c_readstr (str + offset, mode);
4584}
4585
ee92e7ba
MS
4586/* Helper to check the sizes of sequences and the destination of calls
4587 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4588 success (no overflow or invalid sizes), false otherwise. */
4589
4590static bool
4591check_strncat_sizes (tree exp, tree objsize)
4592{
4593 tree dest = CALL_EXPR_ARG (exp, 0);
4594 tree src = CALL_EXPR_ARG (exp, 1);
cc8bea0a 4595 tree maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
4596
4597 /* Try to determine the range of lengths that the source expression
4598 refers to. */
5d6655eb
MS
4599 c_strlen_data lendata = { };
4600 get_range_strlen (src, &lendata, /* eltsize = */ 1);
ee92e7ba
MS
4601
4602 /* Try to verify that the destination is big enough for the shortest
4603 string. */
4604
4605 if (!objsize && warn_stringop_overflow)
4606 {
4607 /* If it hasn't been provided by __strncat_chk, try to determine
4608 the size of the destination object into which the source is
4609 being copied. */
d9c5a8b9 4610 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba
MS
4611 }
4612
4613 /* Add one for the terminating nul. */
5d6655eb
MS
4614 tree srclen = (lendata.minlen
4615 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
ee92e7ba
MS
4616 size_one_node)
4617 : NULL_TREE);
4618
cc8bea0a
MS
4619 /* The strncat function copies at most MAXREAD bytes and always appends
4620 the terminating nul so the specified upper bound should never be equal
4621 to (or greater than) the size of the destination. */
4622 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4623 && tree_int_cst_equal (objsize, maxread))
ee92e7ba 4624 {
e50d56a5
MS
4625 location_t loc = tree_nonartificial_location (exp);
4626 loc = expansion_point_location_if_in_system_header (loc);
4627
4628 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 4629 "%K%qD specified bound %E equals destination size",
cc8bea0a 4630 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
4631
4632 return false;
4633 }
4634
4635 if (!srclen
cc8bea0a 4636 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 4637 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
4638 && tree_int_cst_lt (maxread, srclen)))
4639 srclen = maxread;
ee92e7ba 4640
cc8bea0a 4641 /* The number of bytes to write is LEN but check_access will also
ee92e7ba 4642 check SRCLEN if LEN's value isn't known. */
cc8bea0a
MS
4643 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4644 objsize);
ee92e7ba
MS
4645}
4646
4647/* Similar to expand_builtin_strcat, do some very basic size validation
4648 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
4649 the built-in expand to a call to the library function. */
4650
4651static rtx
4652expand_builtin_strncat (tree exp, rtx)
4653{
4654 if (!validate_arglist (exp,
4655 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4656 || !warn_stringop_overflow)
4657 return NULL_RTX;
4658
4659 tree dest = CALL_EXPR_ARG (exp, 0);
4660 tree src = CALL_EXPR_ARG (exp, 1);
4661 /* The upper bound on the number of bytes to write. */
cc8bea0a 4662 tree maxread = CALL_EXPR_ARG (exp, 2);
b5338fb3
MS
4663
4664 /* Detect unterminated source (only). */
4665 if (!check_nul_terminated_array (exp, src, maxread))
4666 return NULL_RTX;
4667
ee92e7ba
MS
4668 /* The length of the source sequence. */
4669 tree slen = c_strlen (src, 1);
4670
4671 /* Try to determine the range of lengths that the source expression
5d6655eb
MS
4672 refers to. Since the lengths are only used for warning and not
4673 for code generation disable strict mode below. */
4674 tree maxlen = slen;
4675 if (!maxlen)
4676 {
4677 c_strlen_data lendata = { };
4678 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4679 maxlen = lendata.maxbound;
4680 }
ee92e7ba
MS
4681
4682 /* Try to verify that the destination is big enough for the shortest
4683 string. First try to determine the size of the destination object
4684 into which the source is being copied. */
d9c5a8b9 4685 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba
MS
4686
4687 /* Add one for the terminating nul. */
5d6655eb
MS
4688 tree srclen = (maxlen
4689 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
ee92e7ba
MS
4690 size_one_node)
4691 : NULL_TREE);
4692
cc8bea0a
MS
4693 /* The strncat function copies at most MAXREAD bytes and always appends
4694 the terminating nul so the specified upper bound should never be equal
4695 to (or greater than) the size of the destination. */
4696 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4697 && tree_int_cst_equal (destsize, maxread))
ee92e7ba 4698 {
e50d56a5
MS
4699 location_t loc = tree_nonartificial_location (exp);
4700 loc = expansion_point_location_if_in_system_header (loc);
4701
4702 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 4703 "%K%qD specified bound %E equals destination size",
cc8bea0a 4704 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
4705
4706 return NULL_RTX;
4707 }
4708
4709 if (!srclen
cc8bea0a 4710 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 4711 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
4712 && tree_int_cst_lt (maxread, srclen)))
4713 srclen = maxread;
ee92e7ba 4714
cc8bea0a
MS
4715 /* The number of bytes to write is SRCLEN. */
4716 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
ee92e7ba
MS
4717
4718 return NULL_RTX;
4719}
4720
b8698a0f 4721/* Expand expression EXP, which is a call to the strncpy builtin. Return
5039610b 4722 NULL_RTX if we failed the caller should emit a normal call. */
da9e9f08
KG
4723
4724static rtx
44e10129 4725expand_builtin_strncpy (tree exp, rtx target)
da9e9f08 4726{
db3927fb 4727 location_t loc = EXPR_LOCATION (exp);
5039610b 4728
b5338fb3
MS
4729 if (!validate_arglist (exp,
4730 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4731 return NULL_RTX;
4732 tree dest = CALL_EXPR_ARG (exp, 0);
4733 tree src = CALL_EXPR_ARG (exp, 1);
4734 /* The number of bytes to write (not the maximum). */
4735 tree len = CALL_EXPR_ARG (exp, 2);
57814e5e 4736
b5338fb3
MS
4737 if (!check_nul_terminated_array (exp, src, len))
4738 return NULL_RTX;
cc8bea0a 4739
b5338fb3
MS
4740 /* The length of the source sequence. */
4741 tree slen = c_strlen (src, 1);
ee92e7ba 4742
b5338fb3
MS
4743 if (warn_stringop_overflow)
4744 {
4745 tree destsize = compute_objsize (dest,
4746 warn_stringop_overflow - 1);
da9e9f08 4747
b5338fb3
MS
4748 /* The number of bytes to write is LEN but check_access will also
4749 check SLEN if LEN's value isn't known. */
4750 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4751 destsize);
4752 }
da9e9f08 4753
b5338fb3
MS
4754 /* We must be passed a constant len and src parameter. */
4755 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4756 return NULL_RTX;
57814e5e 4757
b5338fb3
MS
4758 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4759
4760 /* We're required to pad with trailing zeros if the requested
4761 len is greater than strlen(s2)+1. In that case try to
4762 use store_by_pieces, if it fails, punt. */
4763 if (tree_int_cst_lt (slen, len))
4764 {
4765 unsigned int dest_align = get_pointer_alignment (dest);
4766 const char *p = c_getstr (src);
4767 rtx dest_mem;
4768
4769 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4770 || !can_store_by_pieces (tree_to_uhwi (len),
4771 builtin_strncpy_read_str,
4772 CONST_CAST (char *, p),
4773 dest_align, false))
4774 return NULL_RTX;
4775
4776 dest_mem = get_memory_rtx (dest, len);
4777 store_by_pieces (dest_mem, tree_to_uhwi (len),
4778 builtin_strncpy_read_str,
4779 CONST_CAST (char *, p), dest_align, false,
4780 RETURN_BEGIN);
4781 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4782 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4783 return dest_mem;
da9e9f08 4784 }
b5338fb3 4785
5039610b 4786 return NULL_RTX;
da9e9f08
KG
4787}
4788
ab937357
JJ
4789/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4790 bytes from constant string DATA + OFFSET and return it as target
4791 constant. */
4792
34d85166 4793rtx
4682ae04 4794builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4795 scalar_int_mode mode)
ab937357
JJ
4796{
4797 const char *c = (const char *) data;
f883e0a7 4798 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
4799
4800 memset (p, *c, GET_MODE_SIZE (mode));
4801
4802 return c_readstr (p, mode);
4803}
4804
1a887f86
RS
4805/* Callback routine for store_by_pieces. Return the RTL of a register
4806 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4807 char value given in the RTL register data. For example, if mode is
4808 4 bytes wide, return the RTL for 0x01010101*data. */
4809
4810static rtx
4682ae04 4811builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4812 scalar_int_mode mode)
1a887f86
RS
4813{
4814 rtx target, coeff;
4815 size_t size;
4816 char *p;
4817
4818 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
4819 if (size == 1)
4820 return (rtx) data;
1a887f86 4821
f883e0a7 4822 p = XALLOCAVEC (char, size);
1a887f86
RS
4823 memset (p, 1, size);
4824 coeff = c_readstr (p, mode);
4825
5ab2f7b7 4826 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
4827 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4828 return force_reg (mode, target);
4829}
4830
b8698a0f
L
4831/* Expand expression EXP, which is a call to the memset builtin. Return
4832 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 4833 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 4834 convenient). */
fed3cef0 4835
28f4ec01 4836static rtx
ef4bddc2 4837expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 4838{
5039610b
SL
4839 if (!validate_arglist (exp,
4840 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4841 return NULL_RTX;
ee92e7ba
MS
4842
4843 tree dest = CALL_EXPR_ARG (exp, 0);
4844 tree val = CALL_EXPR_ARG (exp, 1);
4845 tree len = CALL_EXPR_ARG (exp, 2);
4846
cc8bea0a 4847 check_memop_access (exp, dest, NULL_TREE, len);
ee92e7ba
MS
4848
4849 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5039610b 4850}
28f4ec01 4851
5039610b
SL
4852/* Helper function to do the actual work for expand_builtin_memset. The
4853 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4854 so that this can also be called without constructing an actual CALL_EXPR.
4855 The other arguments and return value are the same as for
4856 expand_builtin_memset. */
880864cf 4857
5039610b
SL
4858static rtx
4859expand_builtin_memset_args (tree dest, tree val, tree len,
ef4bddc2 4860 rtx target, machine_mode mode, tree orig_exp)
5039610b
SL
4861{
4862 tree fndecl, fn;
4863 enum built_in_function fcode;
ef4bddc2 4864 machine_mode val_mode;
5039610b
SL
4865 char c;
4866 unsigned int dest_align;
4867 rtx dest_mem, dest_addr, len_rtx;
4868 HOST_WIDE_INT expected_size = -1;
4869 unsigned int expected_align = 0;
3918b108
JH
4870 unsigned HOST_WIDE_INT min_size;
4871 unsigned HOST_WIDE_INT max_size;
82bb7d4e 4872 unsigned HOST_WIDE_INT probable_max_size;
28f4ec01 4873
0eb77834 4874 dest_align = get_pointer_alignment (dest);
079a182e 4875
5039610b
SL
4876 /* If DEST is not a pointer type, don't do this operation in-line. */
4877 if (dest_align == 0)
4878 return NULL_RTX;
c2bd38e8 4879
a5883ba0
MM
4880 if (currently_expanding_gimple_stmt)
4881 stringop_block_profile (currently_expanding_gimple_stmt,
4882 &expected_align, &expected_size);
726a989a 4883
5039610b
SL
4884 if (expected_align < dest_align)
4885 expected_align = dest_align;
880864cf 4886
5039610b
SL
4887 /* If the LEN parameter is zero, return DEST. */
4888 if (integer_zerop (len))
4889 {
4890 /* Evaluate and ignore VAL in case it has side-effects. */
4891 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4892 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4893 }
57e84f18 4894
5039610b
SL
4895 /* Stabilize the arguments in case we fail. */
4896 dest = builtin_save_expr (dest);
4897 val = builtin_save_expr (val);
4898 len = builtin_save_expr (len);
1a887f86 4899
5039610b 4900 len_rtx = expand_normal (len);
82bb7d4e
JH
4901 determine_block_size (len, len_rtx, &min_size, &max_size,
4902 &probable_max_size);
5039610b 4903 dest_mem = get_memory_rtx (dest, len);
8a445129 4904 val_mode = TYPE_MODE (unsigned_char_type_node);
1a887f86 4905
5039610b
SL
4906 if (TREE_CODE (val) != INTEGER_CST)
4907 {
4908 rtx val_rtx;
1a887f86 4909
5039610b 4910 val_rtx = expand_normal (val);
8a445129 4911 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
28f4ec01 4912
5039610b
SL
4913 /* Assume that we can memset by pieces if we can store
4914 * the coefficients by pieces (in the required modes).
4915 * We can't pass builtin_memset_gen_str as that emits RTL. */
4916 c = 1;
cc269bb6 4917 if (tree_fits_uhwi_p (len)
ae7e9ddd 4918 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
4919 builtin_memset_read_str, &c, dest_align,
4920 true))
5039610b 4921 {
8a445129 4922 val_rtx = force_reg (val_mode, val_rtx);
ae7e9ddd 4923 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150 4924 builtin_memset_gen_str, val_rtx, dest_align,
2ff5ffb6 4925 true, RETURN_BEGIN);
5039610b
SL
4926 }
4927 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4928 dest_align, expected_align,
82bb7d4e
JH
4929 expected_size, min_size, max_size,
4930 probable_max_size))
880864cf 4931 goto do_libcall;
b8698a0f 4932
5039610b
SL
4933 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4934 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4935 return dest_mem;
4936 }
28f4ec01 4937
5039610b
SL
4938 if (target_char_cast (val, &c))
4939 goto do_libcall;
ab937357 4940
5039610b
SL
4941 if (c)
4942 {
cc269bb6 4943 if (tree_fits_uhwi_p (len)
ae7e9ddd 4944 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
4945 builtin_memset_read_str, &c, dest_align,
4946 true))
ae7e9ddd 4947 store_by_pieces (dest_mem, tree_to_uhwi (len),
2ff5ffb6
ML
4948 builtin_memset_read_str, &c, dest_align, true,
4949 RETURN_BEGIN);
8a445129
RS
4950 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4951 gen_int_mode (c, val_mode),
5039610b 4952 dest_align, expected_align,
82bb7d4e
JH
4953 expected_size, min_size, max_size,
4954 probable_max_size))
5039610b 4955 goto do_libcall;
b8698a0f 4956
5039610b
SL
4957 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4958 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4959 return dest_mem;
4960 }
ab937357 4961
5039610b
SL
4962 set_mem_align (dest_mem, dest_align);
4963 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4964 CALL_EXPR_TAILCALL (orig_exp)
4965 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3918b108 4966 expected_align, expected_size,
82bb7d4e
JH
4967 min_size, max_size,
4968 probable_max_size);
28f4ec01 4969
5039610b
SL
4970 if (dest_addr == 0)
4971 {
4972 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4973 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4974 }
28f4ec01 4975
5039610b 4976 return dest_addr;
880864cf 4977
5039610b
SL
4978 do_libcall:
4979 fndecl = get_callee_fndecl (orig_exp);
4980 fcode = DECL_FUNCTION_CODE (fndecl);
31db0fe0 4981 if (fcode == BUILT_IN_MEMSET)
aa493694
JJ
4982 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4983 dest, val, len);
5039610b 4984 else if (fcode == BUILT_IN_BZERO)
aa493694
JJ
4985 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4986 dest, len);
5039610b
SL
4987 else
4988 gcc_unreachable ();
44e10129
MM
4989 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4990 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5039610b 4991 return expand_call (fn, target, target == const0_rtx);
28f4ec01
BS
4992}
4993
b8698a0f 4994/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 4995 NULL_RTX if we failed the caller should emit a normal call. */
5197bd50 4996
e3a709be 4997static rtx
8148fe65 4998expand_builtin_bzero (tree exp)
e3a709be 4999{
5039610b 5000 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 5001 return NULL_RTX;
e3a709be 5002
ee92e7ba
MS
5003 tree dest = CALL_EXPR_ARG (exp, 0);
5004 tree size = CALL_EXPR_ARG (exp, 1);
5005
cc8bea0a 5006 check_memop_access (exp, dest, NULL_TREE, size);
8d51ecf8 5007
3477addf 5008 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
5009 memset(ptr x, int 0, size_t y). This is done this way
5010 so that if it isn't expanded inline, we fallback to
5011 calling bzero instead of memset. */
8d51ecf8 5012
ee92e7ba
MS
5013 location_t loc = EXPR_LOCATION (exp);
5014
5039610b 5015 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
5016 fold_convert_loc (loc,
5017 size_type_node, size),
5039610b 5018 const0_rtx, VOIDmode, exp);
e3a709be
KG
5019}
5020
a666df60
RS
5021/* Try to expand cmpstr operation ICODE with the given operands.
5022 Return the result rtx on success, otherwise return null. */
5023
5024static rtx
5025expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5026 HOST_WIDE_INT align)
5027{
5028 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5029
5030 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5031 target = NULL_RTX;
5032
99b1c316 5033 class expand_operand ops[4];
a666df60
RS
5034 create_output_operand (&ops[0], target, insn_mode);
5035 create_fixed_operand (&ops[1], arg1_rtx);
5036 create_fixed_operand (&ops[2], arg2_rtx);
5037 create_integer_operand (&ops[3], align);
5038 if (maybe_expand_insn (icode, 4, ops))
5039 return ops[0].value;
5040 return NULL_RTX;
5041}
5042
2be3b5ce 5043/* Expand expression EXP, which is a call to the memcmp built-in function.
9b0f6f5e 5044 Return NULL_RTX if we failed and the caller should emit a normal call,
36b85e43
BS
5045 otherwise try to get the result in TARGET, if convenient.
5046 RESULT_EQ is true if we can relax the returned value to be either zero
5047 or nonzero, without caring about the sign. */
5197bd50 5048
28f4ec01 5049static rtx
36b85e43 5050expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
28f4ec01 5051{
5039610b
SL
5052 if (!validate_arglist (exp,
5053 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5054 return NULL_RTX;
c2bd38e8 5055
7f9f48be
RS
5056 tree arg1 = CALL_EXPR_ARG (exp, 0);
5057 tree arg2 = CALL_EXPR_ARG (exp, 1);
5058 tree len = CALL_EXPR_ARG (exp, 2);
b2272b13
QZ
5059 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5060 bool no_overflow = true;
d9c5a8b9
MS
5061
5062 /* Diagnose calls where the specified length exceeds the size of either
5063 object. */
b2272b13
QZ
5064 tree size = compute_objsize (arg1, 0);
5065 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5066 len, /*maxread=*/NULL_TREE, size,
5067 /*objsize=*/NULL_TREE);
10a0e2a9 5068 if (no_overflow)
b2272b13
QZ
5069 {
5070 size = compute_objsize (arg2, 0);
5071 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5072 len, /*maxread=*/NULL_TREE, size,
5073 /*objsize=*/NULL_TREE);
10a0e2a9 5074 }
b2272b13 5075
b99d7d97
QZ
5076 /* If the specified length exceeds the size of either object,
5077 call the function. */
5078 if (!no_overflow)
5079 return NULL_RTX;
5080
10a0e2a9 5081 /* Due to the performance benefit, always inline the calls first
b2272b13
QZ
5082 when result_eq is false. */
5083 rtx result = NULL_RTX;
10a0e2a9 5084
b99d7d97 5085 if (!result_eq && fcode != BUILT_IN_BCMP)
d9c5a8b9 5086 {
523a59ff 5087 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5088 if (result)
5089 return result;
d9c5a8b9
MS
5090 }
5091
36b85e43
BS
5092 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5093 location_t loc = EXPR_LOCATION (exp);
358b8f01 5094
7f9f48be
RS
5095 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5096 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
28f4ec01 5097
7f9f48be
RS
5098 /* If we don't have POINTER_TYPE, call the function. */
5099 if (arg1_align == 0 || arg2_align == 0)
5100 return NULL_RTX;
28f4ec01 5101
7f9f48be
RS
5102 rtx arg1_rtx = get_memory_rtx (arg1, len);
5103 rtx arg2_rtx = get_memory_rtx (arg2, len);
36b85e43 5104 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
28f4ec01 5105
7f9f48be 5106 /* Set MEM_SIZE as appropriate. */
36b85e43 5107 if (CONST_INT_P (len_rtx))
7f9f48be 5108 {
36b85e43
BS
5109 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5110 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7f9f48be 5111 }
6cbaec9e 5112
36b85e43
BS
5113 by_pieces_constfn constfn = NULL;
5114
d0d7f887
BS
5115 const char *src_str = c_getstr (arg2);
5116 if (result_eq && src_str == NULL)
5117 {
5118 src_str = c_getstr (arg1);
5119 if (src_str != NULL)
4f353581 5120 std::swap (arg1_rtx, arg2_rtx);
d0d7f887 5121 }
36b85e43
BS
5122
5123 /* If SRC is a string constant and block move would be done
5124 by pieces, we can avoid loading the string from memory
5125 and only stored the computed constants. */
5126 if (src_str
5127 && CONST_INT_P (len_rtx)
5128 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
5129 constfn = builtin_memcpy_read_str;
5130
b2272b13
QZ
5131 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5132 TREE_TYPE (len), target,
5133 result_eq, constfn,
5134 CONST_CAST (char *, src_str));
36b85e43 5135
7f9f48be
RS
5136 if (result)
5137 {
5138 /* Return the value in the proper mode for this function. */
5139 if (GET_MODE (result) == mode)
5140 return result;
6cbaec9e 5141
7f9f48be
RS
5142 if (target != 0)
5143 {
5144 convert_move (target, result, 0);
5145 return target;
5146 }
8878e913 5147
28f4ec01 5148 return convert_to_mode (mode, result, 0);
7f9f48be 5149 }
28f4ec01 5150
ee516de9 5151 return NULL_RTX;
c2bd38e8
RS
5152}
5153
5039610b 5154/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
28f4ec01
BS
5155 if we failed the caller should emit a normal call, otherwise try to get
5156 the result in TARGET, if convenient. */
fed3cef0 5157
28f4ec01 5158static rtx
44e10129 5159expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
28f4ec01 5160{
5039610b
SL
5161 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5162 return NULL_RTX;
8d51ecf8 5163
b5338fb3
MS
5164 tree arg1 = CALL_EXPR_ARG (exp, 0);
5165 tree arg2 = CALL_EXPR_ARG (exp, 1);
5166
5167 if (!check_nul_terminated_array (exp, arg1)
5168 || !check_nul_terminated_array (exp, arg2))
5169 return NULL_RTX;
5170
b2272b13
QZ
5171 /* Due to the performance benefit, always inline the calls first. */
5172 rtx result = NULL_RTX;
523a59ff 5173 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5174 if (result)
5175 return result;
5176
a666df60
RS
5177 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5178 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
5179 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5180 return NULL_RTX;
c22cacf3 5181
16155777
MS
5182 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5183 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
2be3b5ce 5184
16155777
MS
5185 /* If we don't have POINTER_TYPE, call the function. */
5186 if (arg1_align == 0 || arg2_align == 0)
5187 return NULL_RTX;
2be3b5ce 5188
16155777
MS
5189 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5190 arg1 = builtin_save_expr (arg1);
5191 arg2 = builtin_save_expr (arg2);
28f4ec01 5192
16155777
MS
5193 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5194 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
a666df60 5195
16155777
MS
5196 /* Try to call cmpstrsi. */
5197 if (cmpstr_icode != CODE_FOR_nothing)
5198 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5199 MIN (arg1_align, arg2_align));
40c1d5f8 5200
16155777
MS
5201 /* Try to determine at least one length and call cmpstrnsi. */
5202 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5203 {
5204 tree len;
5205 rtx arg3_rtx;
5206
5207 tree len1 = c_strlen (arg1, 1);
5208 tree len2 = c_strlen (arg2, 1);
5209
5210 if (len1)
5211 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5212 if (len2)
5213 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5214
5215 /* If we don't have a constant length for the first, use the length
5216 of the second, if we know it. We don't require a constant for
5217 this case; some cost analysis could be done if both are available
5218 but neither is constant. For now, assume they're equally cheap,
5219 unless one has side effects. If both strings have constant lengths,
5220 use the smaller. */
5221
5222 if (!len1)
5223 len = len2;
5224 else if (!len2)
5225 len = len1;
5226 else if (TREE_SIDE_EFFECTS (len1))
5227 len = len2;
5228 else if (TREE_SIDE_EFFECTS (len2))
5229 len = len1;
5230 else if (TREE_CODE (len1) != INTEGER_CST)
5231 len = len2;
5232 else if (TREE_CODE (len2) != INTEGER_CST)
5233 len = len1;
5234 else if (tree_int_cst_lt (len1, len2))
5235 len = len1;
5236 else
5237 len = len2;
c43fa1f5 5238
16155777
MS
5239 /* If both arguments have side effects, we cannot optimize. */
5240 if (len && !TREE_SIDE_EFFECTS (len))
40c1d5f8 5241 {
16155777
MS
5242 arg3_rtx = expand_normal (len);
5243 result = expand_cmpstrn_or_cmpmem
5244 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5245 arg3_rtx, MIN (arg1_align, arg2_align));
40c1d5f8 5246 }
16155777
MS
5247 }
5248
16155777 5249 tree fndecl = get_callee_fndecl (exp);
16155777
MS
5250 if (result)
5251 {
36537a1c
MS
5252 /* Check to see if the argument was declared attribute nonstring
5253 and if so, issue a warning since at this point it's not known
5254 to be nul-terminated. */
5255 maybe_warn_nonstring_arg (fndecl, exp);
5256
16155777
MS
5257 /* Return the value in the proper mode for this function. */
5258 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5259 if (GET_MODE (result) == mode)
5260 return result;
5261 if (target == 0)
5262 return convert_to_mode (mode, result, 0);
5263 convert_move (target, result, 0);
5264 return target;
40c1d5f8 5265 }
16155777
MS
5266
5267 /* Expand the library call ourselves using a stabilized argument
5268 list to avoid re-evaluating the function's arguments twice. */
5269 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5270 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5271 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5272 return expand_call (fn, target, target == const0_rtx);
2dee4af1 5273}
28f4ec01 5274
b8698a0f 5275/* Expand expression EXP, which is a call to the strncmp builtin. Return
5039610b 5276 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
da9e9f08 5277 the result in TARGET, if convenient. */
5197bd50 5278
da9e9f08 5279static rtx
44e10129 5280expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
ef4bddc2 5281 ATTRIBUTE_UNUSED machine_mode mode)
da9e9f08 5282{
5039610b
SL
5283 if (!validate_arglist (exp,
5284 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5285 return NULL_RTX;
da9e9f08 5286
b5338fb3
MS
5287 tree arg1 = CALL_EXPR_ARG (exp, 0);
5288 tree arg2 = CALL_EXPR_ARG (exp, 1);
5289 tree arg3 = CALL_EXPR_ARG (exp, 2);
5290
5291 if (!check_nul_terminated_array (exp, arg1, arg3)
5292 || !check_nul_terminated_array (exp, arg2, arg3))
5293 return NULL_RTX;
5294
b2272b13
QZ
5295 /* Due to the performance benefit, always inline the calls first. */
5296 rtx result = NULL_RTX;
523a59ff 5297 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5298 if (result)
5299 return result;
5300
819c1488 5301 /* If c_strlen can determine an expression for one of the string
40c1d5f8 5302 lengths, and it doesn't have side effects, then emit cmpstrnsi
2be3b5ce 5303 using length MIN(strlen(string)+1, arg3). */
a666df60 5304 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
5305 if (cmpstrn_icode == CODE_FOR_nothing)
5306 return NULL_RTX;
5197bd50 5307
16155777
MS
5308 tree len;
5309
16155777
MS
5310 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5311 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5312
5313 tree len1 = c_strlen (arg1, 1);
5314 tree len2 = c_strlen (arg2, 1);
5315
5316 location_t loc = EXPR_LOCATION (exp);
5317
5318 if (len1)
5319 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5320 if (len2)
5321 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5322
5323 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5324
5325 /* If we don't have a constant length for the first, use the length
5326 of the second, if we know it. If neither string is constant length,
5327 use the given length argument. We don't require a constant for
5328 this case; some cost analysis could be done if both are available
5329 but neither is constant. For now, assume they're equally cheap,
5330 unless one has side effects. If both strings have constant lengths,
5331 use the smaller. */
5332
5333 if (!len1 && !len2)
5334 len = len3;
5335 else if (!len1)
5336 len = len2;
5337 else if (!len2)
5338 len = len1;
5339 else if (TREE_SIDE_EFFECTS (len1))
5340 len = len2;
5341 else if (TREE_SIDE_EFFECTS (len2))
5342 len = len1;
5343 else if (TREE_CODE (len1) != INTEGER_CST)
5344 len = len2;
5345 else if (TREE_CODE (len2) != INTEGER_CST)
5346 len = len1;
5347 else if (tree_int_cst_lt (len1, len2))
5348 len = len1;
5349 else
5350 len = len2;
5351
5352 /* If we are not using the given length, we must incorporate it here.
5353 The actual new length parameter will be MIN(len,arg3) in this case. */
5354 if (len != len3)
75e96bc8
MS
5355 {
5356 len = fold_convert_loc (loc, sizetype, len);
5357 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5358 }
16155777
MS
5359 rtx arg1_rtx = get_memory_rtx (arg1, len);
5360 rtx arg2_rtx = get_memory_rtx (arg2, len);
5361 rtx arg3_rtx = expand_normal (len);
b2272b13
QZ
5362 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5363 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5364 MIN (arg1_align, arg2_align));
16155777 5365
16155777 5366 tree fndecl = get_callee_fndecl (exp);
16155777
MS
5367 if (result)
5368 {
36537a1c
MS
5369 /* Check to see if the argument was declared attribute nonstring
5370 and if so, issue a warning since at this point it's not known
5371 to be nul-terminated. */
5372 maybe_warn_nonstring_arg (fndecl, exp);
5373
16155777
MS
5374 /* Return the value in the proper mode for this function. */
5375 mode = TYPE_MODE (TREE_TYPE (exp));
5376 if (GET_MODE (result) == mode)
5377 return result;
5378 if (target == 0)
5379 return convert_to_mode (mode, result, 0);
5380 convert_move (target, result, 0);
5381 return target;
5382 }
5383
5384 /* Expand the library call ourselves using a stabilized argument
5385 list to avoid re-evaluating the function's arguments twice. */
5386 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5387 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5388 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5389 return expand_call (fn, target, target == const0_rtx);
d118937d
KG
5390}
5391
d3707adb
RH
5392/* Expand a call to __builtin_saveregs, generating the result in TARGET,
5393 if that's convenient. */
fed3cef0 5394
d3707adb 5395rtx
4682ae04 5396expand_builtin_saveregs (void)
28f4ec01 5397{
58f4cf2a
DM
5398 rtx val;
5399 rtx_insn *seq;
28f4ec01
BS
5400
5401 /* Don't do __builtin_saveregs more than once in a function.
5402 Save the result of the first call and reuse it. */
5403 if (saveregs_value != 0)
5404 return saveregs_value;
28f4ec01 5405
d3707adb
RH
5406 /* When this function is called, it means that registers must be
5407 saved on entry to this function. So we migrate the call to the
5408 first insn of this function. */
5409
5410 start_sequence ();
28f4ec01 5411
d3707adb 5412 /* Do whatever the machine needs done in this case. */
61f71b34 5413 val = targetm.calls.expand_builtin_saveregs ();
28f4ec01 5414
d3707adb
RH
5415 seq = get_insns ();
5416 end_sequence ();
28f4ec01 5417
d3707adb 5418 saveregs_value = val;
28f4ec01 5419
2f937369
DM
5420 /* Put the insns after the NOTE that starts the function. If this
5421 is inside a start_sequence, make the outer-level insn chain current, so
d3707adb
RH
5422 the code is placed at the start of the function. */
5423 push_topmost_sequence ();
242229bb 5424 emit_insn_after (seq, entry_of_function ());
d3707adb
RH
5425 pop_topmost_sequence ();
5426
5427 return val;
28f4ec01
BS
5428}
5429
8870e212 5430/* Expand a call to __builtin_next_arg. */
5197bd50 5431
28f4ec01 5432static rtx
8870e212 5433expand_builtin_next_arg (void)
28f4ec01 5434{
8870e212
JJ
5435 /* Checking arguments is already done in fold_builtin_next_arg
5436 that must be called before this function. */
4319e38c 5437 return expand_binop (ptr_mode, add_optab,
38173d38
JH
5438 crtl->args.internal_arg_pointer,
5439 crtl->args.arg_offset_rtx,
28f4ec01
BS
5440 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5441}
5442
d3707adb
RH
5443/* Make it easier for the backends by protecting the valist argument
5444 from multiple evaluations. */
5445
5446static tree
db3927fb 5447stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
d3707adb 5448{
35cbb299
KT
5449 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5450
70f34814
RG
5451 /* The current way of determining the type of valist is completely
5452 bogus. We should have the information on the va builtin instead. */
5453 if (!vatype)
5454 vatype = targetm.fn_abi_va_list (cfun->decl);
35cbb299
KT
5455
5456 if (TREE_CODE (vatype) == ARRAY_TYPE)
d3707adb 5457 {
9f720c3e
GK
5458 if (TREE_SIDE_EFFECTS (valist))
5459 valist = save_expr (valist);
8ebecc3b 5460
9f720c3e 5461 /* For this case, the backends will be expecting a pointer to
35cbb299
KT
5462 vatype, but it's possible we've actually been given an array
5463 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
9f720c3e
GK
5464 So fix it. */
5465 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
daf68dd7 5466 {
35cbb299 5467 tree p1 = build_pointer_type (TREE_TYPE (vatype));
db3927fb 5468 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
daf68dd7 5469 }
d3707adb 5470 }
8ebecc3b 5471 else
d3707adb 5472 {
70f34814 5473 tree pt = build_pointer_type (vatype);
8ebecc3b 5474
9f720c3e
GK
5475 if (! needs_lvalue)
5476 {
8ebecc3b
RH
5477 if (! TREE_SIDE_EFFECTS (valist))
5478 return valist;
8d51ecf8 5479
db3927fb 5480 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
d3707adb 5481 TREE_SIDE_EFFECTS (valist) = 1;
d3707adb 5482 }
9f720c3e 5483
8ebecc3b 5484 if (TREE_SIDE_EFFECTS (valist))
9f720c3e 5485 valist = save_expr (valist);
70f34814
RG
5486 valist = fold_build2_loc (loc, MEM_REF,
5487 vatype, valist, build_int_cst (pt, 0));
d3707adb
RH
5488 }
5489
5490 return valist;
5491}
5492
c35d187f
RH
5493/* The "standard" definition of va_list is void*. */
5494
5495tree
5496std_build_builtin_va_list (void)
5497{
5498 return ptr_type_node;
5499}
5500
35cbb299
KT
5501/* The "standard" abi va_list is va_list_type_node. */
5502
5503tree
5504std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5505{
5506 return va_list_type_node;
5507}
5508
5509/* The "standard" type of va_list is va_list_type_node. */
5510
5511tree
5512std_canonical_va_list_type (tree type)
5513{
5514 tree wtype, htype;
5515
35cbb299
KT
5516 wtype = va_list_type_node;
5517 htype = type;
431e31a9
TV
5518
5519 if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
5520 {
5521 /* If va_list is an array type, the argument may have decayed
5522 to a pointer type, e.g. by being passed to another function.
5523 In that case, unwrap both types so that we can compare the
5524 underlying records. */
5525 if (TREE_CODE (htype) == ARRAY_TYPE
5526 || POINTER_TYPE_P (htype))
5527 {
5528 wtype = TREE_TYPE (wtype);
5529 htype = TREE_TYPE (htype);
5530 }
5531 }
5532 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5533 return va_list_type_node;
5534
5535 return NULL_TREE;
5536}
5537
d3707adb
RH
5538/* The "standard" implementation of va_start: just assign `nextarg' to
5539 the variable. */
5197bd50 5540
d3707adb 5541void
4682ae04 5542std_expand_builtin_va_start (tree valist, rtx nextarg)
d3707adb 5543{
508dabda
ILT
5544 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5545 convert_move (va_r, nextarg, 0);
d3707adb
RH
5546}
5547
5039610b 5548/* Expand EXP, a call to __builtin_va_start. */
5197bd50 5549
d3707adb 5550static rtx
5039610b 5551expand_builtin_va_start (tree exp)
d3707adb
RH
5552{
5553 rtx nextarg;
5039610b 5554 tree valist;
db3927fb 5555 location_t loc = EXPR_LOCATION (exp);
d3707adb 5556
5039610b 5557 if (call_expr_nargs (exp) < 2)
c69c9b36 5558 {
db3927fb 5559 error_at (loc, "too few arguments to function %<va_start%>");
c69c9b36
JM
5560 return const0_rtx;
5561 }
d3707adb 5562
5039610b 5563 if (fold_builtin_next_arg (exp, true))
8870e212 5564 return const0_rtx;
d3147f64 5565
8870e212 5566 nextarg = expand_builtin_next_arg ();
db3927fb 5567 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
d3707adb 5568
d7bd8aeb
JJ
5569 if (targetm.expand_builtin_va_start)
5570 targetm.expand_builtin_va_start (valist, nextarg);
5571 else
5572 std_expand_builtin_va_start (valist, nextarg);
d3707adb
RH
5573
5574 return const0_rtx;
5575}
5576
5039610b 5577/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 5578
d3707adb 5579static rtx
5039610b 5580expand_builtin_va_end (tree exp)
d3707adb 5581{
5039610b 5582 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 5583
daf68dd7
RH
5584 /* Evaluate for side effects, if needed. I hate macros that don't
5585 do that. */
5586 if (TREE_SIDE_EFFECTS (valist))
5587 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
5588
5589 return const0_rtx;
5590}
5591
5039610b 5592/* Expand EXP, a call to __builtin_va_copy. We do this as a
d3707adb
RH
5593 builtin rather than just as an assignment in stdarg.h because of the
5594 nastiness of array-type va_list types. */
3bdf5ad1 5595
d3707adb 5596static rtx
5039610b 5597expand_builtin_va_copy (tree exp)
d3707adb
RH
5598{
5599 tree dst, src, t;
db3927fb 5600 location_t loc = EXPR_LOCATION (exp);
d3707adb 5601
5039610b
SL
5602 dst = CALL_EXPR_ARG (exp, 0);
5603 src = CALL_EXPR_ARG (exp, 1);
d3707adb 5604
db3927fb
AH
5605 dst = stabilize_va_list_loc (loc, dst, 1);
5606 src = stabilize_va_list_loc (loc, src, 0);
d3707adb 5607
35cbb299
KT
5608 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5609
5610 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
d3707adb 5611 {
35cbb299 5612 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
d3707adb
RH
5613 TREE_SIDE_EFFECTS (t) = 1;
5614 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5615 }
5616 else
5617 {
8ebecc3b
RH
5618 rtx dstb, srcb, size;
5619
5620 /* Evaluate to pointers. */
5621 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5622 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
35cbb299
KT
5623 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5624 NULL_RTX, VOIDmode, EXPAND_NORMAL);
8ebecc3b 5625
5ae6cd0d
MM
5626 dstb = convert_memory_address (Pmode, dstb);
5627 srcb = convert_memory_address (Pmode, srcb);
ce2d32cd 5628
8ebecc3b
RH
5629 /* "Dereference" to BLKmode memories. */
5630 dstb = gen_rtx_MEM (BLKmode, dstb);
ba4828e0 5631 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
35cbb299 5632 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b 5633 srcb = gen_rtx_MEM (BLKmode, srcb);
ba4828e0 5634 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
35cbb299 5635 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b
RH
5636
5637 /* Copy. */
44bb111a 5638 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
d3707adb
RH
5639 }
5640
5641 return const0_rtx;
5642}
5643
28f4ec01
BS
5644/* Expand a call to one of the builtin functions __builtin_frame_address or
5645 __builtin_return_address. */
5197bd50 5646
28f4ec01 5647static rtx
5039610b 5648expand_builtin_frame_address (tree fndecl, tree exp)
28f4ec01 5649{
28f4ec01
BS
5650 /* The argument must be a nonnegative integer constant.
5651 It counts the number of frames to scan up the stack.
8423e57c
MS
5652 The value is either the frame pointer value or the return
5653 address saved in that frame. */
5039610b 5654 if (call_expr_nargs (exp) == 0)
28f4ec01
BS
5655 /* Warning about missing arg was already issued. */
5656 return const0_rtx;
cc269bb6 5657 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
28f4ec01 5658 {
8423e57c 5659 error ("invalid argument to %qD", fndecl);
28f4ec01
BS
5660 return const0_rtx;
5661 }
5662 else
5663 {
8423e57c
MS
5664 /* Number of frames to scan up the stack. */
5665 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5666
5667 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
28f4ec01
BS
5668
5669 /* Some ports cannot access arbitrary stack frames. */
5670 if (tem == NULL)
5671 {
8423e57c 5672 warning (0, "unsupported argument to %qD", fndecl);
28f4ec01
BS
5673 return const0_rtx;
5674 }
5675
8423e57c
MS
5676 if (count)
5677 {
5678 /* Warn since no effort is made to ensure that any frame
5679 beyond the current one exists or can be safely reached. */
5680 warning (OPT_Wframe_address, "calling %qD with "
5681 "a nonzero argument is unsafe", fndecl);
5682 }
5683
28f4ec01
BS
5684 /* For __builtin_frame_address, return what we've got. */
5685 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5686 return tem;
5687
f8cfc6aa 5688 if (!REG_P (tem)
28f4ec01 5689 && ! CONSTANT_P (tem))
18ae1560 5690 tem = copy_addr_to_reg (tem);
28f4ec01
BS
5691 return tem;
5692 }
5693}
5694
d3c12306 5695/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
b7e52782 5696 failed and the caller should emit a normal call. */
d5457140 5697
28f4ec01 5698static rtx
b7e52782 5699expand_builtin_alloca (tree exp)
28f4ec01
BS
5700{
5701 rtx op0;
d5457140 5702 rtx result;
13e49da9 5703 unsigned int align;
8bd9f164 5704 tree fndecl = get_callee_fndecl (exp);
9e878cf1
EB
5705 HOST_WIDE_INT max_size;
5706 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
b7e52782 5707 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
8bd9f164 5708 bool valid_arglist
9e878cf1
EB
5709 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5710 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5711 VOID_TYPE)
5712 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5713 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5714 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
13e49da9
TV
5715
5716 if (!valid_arglist)
5039610b 5717 return NULL_RTX;
28f4ec01 5718
00abf86c
MS
5719 if ((alloca_for_var
5720 && warn_vla_limit >= HOST_WIDE_INT_MAX
5721 && warn_alloc_size_limit < warn_vla_limit)
5722 || (!alloca_for_var
5723 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5724 && warn_alloc_size_limit < warn_alloca_limit
5725 ))
8bd9f164 5726 {
00abf86c
MS
5727 /* -Walloca-larger-than and -Wvla-larger-than settings of
5728 less than HOST_WIDE_INT_MAX override the more general
5729 -Walloc-size-larger-than so unless either of the former
5730 options is smaller than the last one (wchich would imply
5731 that the call was already checked), check the alloca
5732 arguments for overflow. */
8bd9f164
MS
5733 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5734 int idx[] = { 0, -1 };
5735 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5736 }
5737
28f4ec01 5738 /* Compute the argument. */
5039610b 5739 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
28f4ec01 5740
13e49da9 5741 /* Compute the alignment. */
9e878cf1
EB
5742 align = (fcode == BUILT_IN_ALLOCA
5743 ? BIGGEST_ALIGNMENT
5744 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5745
5746 /* Compute the maximum size. */
5747 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5748 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5749 : -1);
13e49da9 5750
b7e52782
EB
5751 /* Allocate the desired space. If the allocation stems from the declaration
5752 of a variable-sized object, it cannot accumulate. */
9e878cf1
EB
5753 result
5754 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5ae6cd0d 5755 result = convert_memory_address (ptr_mode, result);
d5457140 5756
3cf3da88
EB
5757 /* Dynamic allocations for variables are recorded during gimplification. */
5758 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5759 record_dynamic_alloc (exp);
5760
d5457140 5761 return result;
28f4ec01
BS
5762}
5763
7504c3bf
JJ
5764/* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5765 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5766 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5767 handle_builtin_stack_restore function. */
e3174bdf
MO
5768
5769static rtx
5770expand_asan_emit_allocas_unpoison (tree exp)
5771{
5772 tree arg0 = CALL_EXPR_ARG (exp, 0);
7504c3bf 5773 tree arg1 = CALL_EXPR_ARG (exp, 1);
8f4956ca 5774 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7504c3bf
JJ
5775 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5776 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5777 stack_pointer_rtx, NULL_RTX, 0,
5778 OPTAB_LIB_WIDEN);
5779 off = convert_modes (ptr_mode, Pmode, off, 0);
5780 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5781 OPTAB_LIB_WIDEN);
e3174bdf 5782 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
db69559b
RS
5783 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5784 top, ptr_mode, bot, ptr_mode);
e3174bdf
MO
5785 return ret;
5786}
5787
ac868f29
EB
5788/* Expand a call to bswap builtin in EXP.
5789 Return NULL_RTX if a normal call should be emitted rather than expanding the
5790 function in-line. If convenient, the result should be placed in TARGET.
5791 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
5792
5793static rtx
ef4bddc2 5794expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 5795 rtx subtarget)
167fa32c 5796{
167fa32c
EC
5797 tree arg;
5798 rtx op0;
5799
5039610b
SL
5800 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5801 return NULL_RTX;
167fa32c 5802
5039610b 5803 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
5804 op0 = expand_expr (arg,
5805 subtarget && GET_MODE (subtarget) == target_mode
5806 ? subtarget : NULL_RTX,
5807 target_mode, EXPAND_NORMAL);
5808 if (GET_MODE (op0) != target_mode)
5809 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 5810
ac868f29 5811 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
5812
5813 gcc_assert (target);
5814
ac868f29 5815 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
5816}
5817
5039610b
SL
5818/* Expand a call to a unary builtin in EXP.
5819 Return NULL_RTX if a normal call should be emitted rather than expanding the
28f4ec01
BS
5820 function in-line. If convenient, the result should be placed in TARGET.
5821 SUBTARGET may be used as the target for computing one of EXP's operands. */
d5457140 5822
28f4ec01 5823static rtx
ef4bddc2 5824expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4682ae04 5825 rtx subtarget, optab op_optab)
28f4ec01
BS
5826{
5827 rtx op0;
5039610b
SL
5828
5829 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5830 return NULL_RTX;
28f4ec01
BS
5831
5832 /* Compute the argument. */
4359dc2a
JJ
5833 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5834 (subtarget
5835 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5836 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
49452c07 5837 VOIDmode, EXPAND_NORMAL);
2928cd7a 5838 /* Compute op, into TARGET if possible.
28f4ec01 5839 Set TARGET to wherever the result comes back. */
5039610b 5840 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
146aef0b 5841 op_optab, op0, target, op_optab != clrsb_optab);
298e6adc 5842 gcc_assert (target);
5906d013 5843
6c537d03 5844 return convert_to_mode (target_mode, target, 0);
28f4ec01 5845}
994a57cd 5846
b8698a0f 5847/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
5848 as the builtin_expect semantic should've been already executed by
5849 tree branch prediction pass. */
994a57cd
RH
5850
5851static rtx
5039610b 5852expand_builtin_expect (tree exp, rtx target)
994a57cd 5853{
451409e4 5854 tree arg;
994a57cd 5855
5039610b 5856 if (call_expr_nargs (exp) < 2)
994a57cd 5857 return const0_rtx;
5039610b 5858 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 5859
5039610b 5860 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 5861 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 5862 gcc_assert (!flag_guess_branch_prob
1da2ed5f 5863 || optimize == 0 || seen_error ());
994a57cd
RH
5864 return target;
5865}
5f2d6cfa 5866
1e9168b2
ML
5867/* Expand a call to __builtin_expect_with_probability. We just return our
5868 argument as the builtin_expect semantic should've been already executed by
5869 tree branch prediction pass. */
5870
5871static rtx
5872expand_builtin_expect_with_probability (tree exp, rtx target)
5873{
5874 tree arg;
5875
5876 if (call_expr_nargs (exp) < 3)
5877 return const0_rtx;
5878 arg = CALL_EXPR_ARG (exp, 0);
5879
5880 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5881 /* When guessing was done, the hints should be already stripped away. */
5882 gcc_assert (!flag_guess_branch_prob
5883 || optimize == 0 || seen_error ());
5884 return target;
5885}
5886
5887
45d439ac
JJ
5888/* Expand a call to __builtin_assume_aligned. We just return our first
5889 argument as the builtin_assume_aligned semantic should've been already
5890 executed by CCP. */
5891
5892static rtx
5893expand_builtin_assume_aligned (tree exp, rtx target)
5894{
5895 if (call_expr_nargs (exp) < 2)
5896 return const0_rtx;
5897 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5898 EXPAND_NORMAL);
5899 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5900 && (call_expr_nargs (exp) < 3
5901 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5902 return target;
5903}
5904
1e188d1e 5905void
4682ae04 5906expand_builtin_trap (void)
9602f5a0 5907{
eb6f47fb 5908 if (targetm.have_trap ())
206604dc 5909 {
eb6f47fb 5910 rtx_insn *insn = emit_insn (targetm.gen_trap ());
206604dc
JJ
5911 /* For trap insns when not accumulating outgoing args force
5912 REG_ARGS_SIZE note to prevent crossjumping of calls with
5913 different args sizes. */
5914 if (!ACCUMULATE_OUTGOING_ARGS)
68184180 5915 add_args_size_note (insn, stack_pointer_delta);
206604dc 5916 }
9602f5a0 5917 else
ee516de9
EB
5918 {
5919 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5920 tree call_expr = build_call_expr (fn, 0);
5921 expand_call (call_expr, NULL_RTX, false);
5922 }
5923
9602f5a0
RH
5924 emit_barrier ();
5925}
075ec276 5926
468059bc
DD
5927/* Expand a call to __builtin_unreachable. We do nothing except emit
5928 a barrier saying that control flow will not pass here.
5929
5930 It is the responsibility of the program being compiled to ensure
5931 that control flow does never reach __builtin_unreachable. */
5932static void
5933expand_builtin_unreachable (void)
5934{
5935 emit_barrier ();
5936}
5937
5039610b
SL
5938/* Expand EXP, a call to fabs, fabsf or fabsl.
5939 Return NULL_RTX if a normal call should be emitted rather than expanding
075ec276
RS
5940 the function inline. If convenient, the result should be placed
5941 in TARGET. SUBTARGET may be used as the target for computing
5942 the operand. */
5943
5944static rtx
5039610b 5945expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
075ec276 5946{
ef4bddc2 5947 machine_mode mode;
075ec276
RS
5948 tree arg;
5949 rtx op0;
5950
5039610b
SL
5951 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5952 return NULL_RTX;
075ec276 5953
5039610b 5954 arg = CALL_EXPR_ARG (exp, 0);
4cd8e76f 5955 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
075ec276 5956 mode = TYPE_MODE (TREE_TYPE (arg));
49452c07 5957 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
075ec276
RS
5958 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5959}
5960
5039610b 5961/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
5962 Return NULL is a normal call should be emitted rather than expanding the
5963 function inline. If convenient, the result should be placed in TARGET.
5964 SUBTARGET may be used as the target for computing the operand. */
5965
5966static rtx
5039610b 5967expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
5968{
5969 rtx op0, op1;
5970 tree arg;
5971
5039610b
SL
5972 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5973 return NULL_RTX;
046625fa 5974
5039610b 5975 arg = CALL_EXPR_ARG (exp, 0);
84217346 5976 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 5977
5039610b 5978 arg = CALL_EXPR_ARG (exp, 1);
84217346 5979 op1 = expand_normal (arg);
046625fa
RH
5980
5981 return expand_copysign (op0, op1, target);
5982}
5983
677feb77
DD
5984/* Expand a call to __builtin___clear_cache. */
5985
5986static rtx
f2cf13bd 5987expand_builtin___clear_cache (tree exp)
677feb77 5988{
f2cf13bd
RS
5989 if (!targetm.code_for_clear_cache)
5990 {
677feb77 5991#ifdef CLEAR_INSN_CACHE
f2cf13bd
RS
5992 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5993 does something. Just do the default expansion to a call to
5994 __clear_cache(). */
5995 return NULL_RTX;
677feb77 5996#else
f2cf13bd
RS
5997 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5998 does nothing. There is no need to call it. Do nothing. */
5999 return const0_rtx;
677feb77 6000#endif /* CLEAR_INSN_CACHE */
f2cf13bd
RS
6001 }
6002
677feb77
DD
6003 /* We have a "clear_cache" insn, and it will handle everything. */
6004 tree begin, end;
6005 rtx begin_rtx, end_rtx;
677feb77
DD
6006
6007 /* We must not expand to a library call. If we did, any
6008 fallback library function in libgcc that might contain a call to
6009 __builtin___clear_cache() would recurse infinitely. */
6010 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6011 {
6012 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6013 return const0_rtx;
6014 }
6015
f2cf13bd 6016 if (targetm.have_clear_cache ())
677feb77 6017 {
99b1c316 6018 class expand_operand ops[2];
677feb77
DD
6019
6020 begin = CALL_EXPR_ARG (exp, 0);
6021 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77
DD
6022
6023 end = CALL_EXPR_ARG (exp, 1);
6024 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 6025
a5c7d693
RS
6026 create_address_operand (&ops[0], begin_rtx);
6027 create_address_operand (&ops[1], end_rtx);
f2cf13bd 6028 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
a5c7d693 6029 return const0_rtx;
677feb77
DD
6030 }
6031 return const0_rtx;
677feb77
DD
6032}
6033
6de9cd9a
DN
6034/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6035
6036static rtx
6037round_trampoline_addr (rtx tramp)
6038{
6039 rtx temp, addend, mask;
6040
6041 /* If we don't need too much alignment, we'll have been guaranteed
6042 proper alignment by get_trampoline_type. */
6043 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6044 return tramp;
6045
6046 /* Round address up to desired boundary. */
6047 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
6048 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6049 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
6050
6051 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6052 temp, 0, OPTAB_LIB_WIDEN);
6053 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6054 temp, 0, OPTAB_LIB_WIDEN);
6055
6056 return tramp;
6057}
6058
6059static rtx
183dd130 6060expand_builtin_init_trampoline (tree exp, bool onstack)
6de9cd9a
DN
6061{
6062 tree t_tramp, t_func, t_chain;
531ca746 6063 rtx m_tramp, r_tramp, r_chain, tmp;
6de9cd9a 6064
5039610b 6065 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6de9cd9a
DN
6066 POINTER_TYPE, VOID_TYPE))
6067 return NULL_RTX;
6068
5039610b
SL
6069 t_tramp = CALL_EXPR_ARG (exp, 0);
6070 t_func = CALL_EXPR_ARG (exp, 1);
6071 t_chain = CALL_EXPR_ARG (exp, 2);
6de9cd9a 6072
84217346 6073 r_tramp = expand_normal (t_tramp);
531ca746
RH
6074 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6075 MEM_NOTRAP_P (m_tramp) = 1;
6076
183dd130
ILT
6077 /* If ONSTACK, the TRAMP argument should be the address of a field
6078 within the local function's FRAME decl. Either way, let's see if
6079 we can fill in the MEM_ATTRs for this memory. */
531ca746 6080 if (TREE_CODE (t_tramp) == ADDR_EXPR)
ad2e5b71 6081 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
531ca746 6082
183dd130
ILT
6083 /* Creator of a heap trampoline is responsible for making sure the
6084 address is aligned to at least STACK_BOUNDARY. Normally malloc
6085 will ensure this anyhow. */
531ca746
RH
6086 tmp = round_trampoline_addr (r_tramp);
6087 if (tmp != r_tramp)
6088 {
6089 m_tramp = change_address (m_tramp, BLKmode, tmp);
6090 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
f5541398 6091 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
531ca746
RH
6092 }
6093
6094 /* The FUNC argument should be the address of the nested function.
6095 Extract the actual function decl to pass to the hook. */
6096 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6097 t_func = TREE_OPERAND (t_func, 0);
6098 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6099
84217346 6100 r_chain = expand_normal (t_chain);
6de9cd9a
DN
6101
6102 /* Generate insns to initialize the trampoline. */
531ca746 6103 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6de9cd9a 6104
183dd130
ILT
6105 if (onstack)
6106 {
6107 trampolines_created = 1;
8ffadef9 6108
4c640e26
EB
6109 if (targetm.calls.custom_function_descriptors != 0)
6110 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6111 "trampoline generated for nested function %qD", t_func);
183dd130 6112 }
8ffadef9 6113
6de9cd9a
DN
6114 return const0_rtx;
6115}
6116
6117static rtx
5039610b 6118expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
6119{
6120 rtx tramp;
6121
5039610b 6122 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
6123 return NULL_RTX;
6124
5039610b 6125 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 6126 tramp = round_trampoline_addr (tramp);
531ca746
RH
6127 if (targetm.calls.trampoline_adjust_address)
6128 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
6129
6130 return tramp;
6131}
6132
4c640e26
EB
6133/* Expand a call to the builtin descriptor initialization routine.
6134 A descriptor is made up of a couple of pointers to the static
6135 chain and the code entry in this order. */
6136
6137static rtx
6138expand_builtin_init_descriptor (tree exp)
6139{
6140 tree t_descr, t_func, t_chain;
6141 rtx m_descr, r_descr, r_func, r_chain;
6142
6143 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6144 VOID_TYPE))
6145 return NULL_RTX;
6146
6147 t_descr = CALL_EXPR_ARG (exp, 0);
6148 t_func = CALL_EXPR_ARG (exp, 1);
6149 t_chain = CALL_EXPR_ARG (exp, 2);
6150
6151 r_descr = expand_normal (t_descr);
6152 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6153 MEM_NOTRAP_P (m_descr) = 1;
0bdf9f92 6154 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
4c640e26
EB
6155
6156 r_func = expand_normal (t_func);
6157 r_chain = expand_normal (t_chain);
6158
6159 /* Generate insns to initialize the descriptor. */
6160 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6161 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6162 POINTER_SIZE / BITS_PER_UNIT), r_func);
6163
6164 return const0_rtx;
6165}
6166
6167/* Expand a call to the builtin descriptor adjustment routine. */
6168
6169static rtx
6170expand_builtin_adjust_descriptor (tree exp)
6171{
6172 rtx tramp;
6173
6174 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6175 return NULL_RTX;
6176
6177 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6178
6179 /* Unalign the descriptor to allow runtime identification. */
6180 tramp = plus_constant (ptr_mode, tramp,
6181 targetm.calls.custom_function_descriptors);
6182
6183 return force_operand (tramp, NULL_RTX);
6184}
6185
0f67fa83
WG
6186/* Expand the call EXP to the built-in signbit, signbitf or signbitl
6187 function. The function first checks whether the back end provides
6188 an insn to implement signbit for the respective mode. If not, it
6189 checks whether the floating point format of the value is such that
61717a45
FXC
6190 the sign bit can be extracted. If that is not the case, error out.
6191 EXP is the expression that is a call to the builtin function; if
6192 convenient, the result should be placed in TARGET. */
ef79730c
RS
6193static rtx
6194expand_builtin_signbit (tree exp, rtx target)
6195{
6196 const struct real_format *fmt;
b5f2d801 6197 scalar_float_mode fmode;
095a2d76 6198 scalar_int_mode rmode, imode;
5039610b 6199 tree arg;
e4fbead1 6200 int word, bitpos;
d0c9d431 6201 enum insn_code icode;
ef79730c 6202 rtx temp;
db3927fb 6203 location_t loc = EXPR_LOCATION (exp);
ef79730c 6204
5039610b
SL
6205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6206 return NULL_RTX;
ef79730c 6207
5039610b 6208 arg = CALL_EXPR_ARG (exp, 0);
b5f2d801 6209 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
7a504f33 6210 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
ef79730c
RS
6211 fmt = REAL_MODE_FORMAT (fmode);
6212
0f67fa83
WG
6213 arg = builtin_save_expr (arg);
6214
6215 /* Expand the argument yielding a RTX expression. */
6216 temp = expand_normal (arg);
6217
6218 /* Check if the back end provides an insn that handles signbit for the
6219 argument's mode. */
947131ba 6220 icode = optab_handler (signbit_optab, fmode);
d0c9d431 6221 if (icode != CODE_FOR_nothing)
0f67fa83 6222 {
58f4cf2a 6223 rtx_insn *last = get_last_insn ();
0f67fa83 6224 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8a0b1aa4
MM
6225 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6226 return target;
6227 delete_insns_since (last);
0f67fa83
WG
6228 }
6229
ef79730c
RS
6230 /* For floating point formats without a sign bit, implement signbit
6231 as "ARG < 0.0". */
b87a0206 6232 bitpos = fmt->signbit_ro;
e4fbead1 6233 if (bitpos < 0)
ef79730c
RS
6234 {
6235 /* But we can't do this if the format supports signed zero. */
61717a45 6236 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
ef79730c 6237
db3927fb 6238 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
987b67bc 6239 build_real (TREE_TYPE (arg), dconst0));
ef79730c
RS
6240 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6241 }
6242
e4fbead1 6243 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
ef79730c 6244 {
304b9962 6245 imode = int_mode_for_mode (fmode).require ();
e4fbead1 6246 temp = gen_lowpart (imode, temp);
254878ea
RS
6247 }
6248 else
6249 {
e4fbead1
RS
6250 imode = word_mode;
6251 /* Handle targets with different FP word orders. */
6252 if (FLOAT_WORDS_BIG_ENDIAN)
c22cacf3 6253 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
e4fbead1 6254 else
c22cacf3 6255 word = bitpos / BITS_PER_WORD;
e4fbead1
RS
6256 temp = operand_subword_force (temp, word, fmode);
6257 bitpos = bitpos % BITS_PER_WORD;
6258 }
6259
210e1852
RS
6260 /* Force the intermediate word_mode (or narrower) result into a
6261 register. This avoids attempting to create paradoxical SUBREGs
6262 of floating point modes below. */
6263 temp = force_reg (imode, temp);
6264
e4fbead1
RS
6265 /* If the bitpos is within the "result mode" lowpart, the operation
6266 can be implement with a single bitwise AND. Otherwise, we need
6267 a right shift and an AND. */
6268
6269 if (bitpos < GET_MODE_BITSIZE (rmode))
6270 {
807e902e 6271 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
ef79730c 6272
515e442a 6273 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
e4fbead1 6274 temp = gen_lowpart (rmode, temp);
254878ea 6275 temp = expand_binop (rmode, and_optab, temp,
807e902e 6276 immed_wide_int_const (mask, rmode),
e4fbead1 6277 NULL_RTX, 1, OPTAB_LIB_WIDEN);
ef79730c 6278 }
e4fbead1
RS
6279 else
6280 {
6281 /* Perform a logical right shift to place the signbit in the least
c22cacf3 6282 significant bit, then truncate the result to the desired mode
e4fbead1 6283 and mask just this bit. */
eb6c3df1 6284 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
e4fbead1
RS
6285 temp = gen_lowpart (rmode, temp);
6286 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6287 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6288 }
6289
ef79730c
RS
6290 return temp;
6291}
d1c38823
ZD
6292
6293/* Expand fork or exec calls. TARGET is the desired target of the
5039610b 6294 call. EXP is the call. FN is the
d1c38823
ZD
6295 identificator of the actual function. IGNORE is nonzero if the
6296 value is to be ignored. */
6297
6298static rtx
5039610b 6299expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
d1c38823
ZD
6300{
6301 tree id, decl;
6302 tree call;
6303
b5338fb3
MS
6304 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6305 {
6306 /* Detect unterminated path. */
6307 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6308 return NULL_RTX;
6309
6310 /* Also detect unterminated first argument. */
6311 switch (DECL_FUNCTION_CODE (fn))
6312 {
6313 case BUILT_IN_EXECL:
6314 case BUILT_IN_EXECLE:
6315 case BUILT_IN_EXECLP:
6316 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6317 return NULL_RTX;
6318 default:
6319 break;
6320 }
6321 }
6322
6323
d1c38823
ZD
6324 /* If we are not profiling, just call the function. */
6325 if (!profile_arc_flag)
6326 return NULL_RTX;
6327
6328 /* Otherwise call the wrapper. This should be equivalent for the rest of
6329 compiler, so the code does not diverge, and the wrapper may run the
2b8a92de 6330 code necessary for keeping the profiling sane. */
d1c38823
ZD
6331
6332 switch (DECL_FUNCTION_CODE (fn))
6333 {
6334 case BUILT_IN_FORK:
6335 id = get_identifier ("__gcov_fork");
6336 break;
6337
6338 case BUILT_IN_EXECL:
6339 id = get_identifier ("__gcov_execl");
6340 break;
6341
6342 case BUILT_IN_EXECV:
6343 id = get_identifier ("__gcov_execv");
6344 break;
6345
6346 case BUILT_IN_EXECLP:
6347 id = get_identifier ("__gcov_execlp");
6348 break;
6349
6350 case BUILT_IN_EXECLE:
6351 id = get_identifier ("__gcov_execle");
6352 break;
6353
6354 case BUILT_IN_EXECVP:
6355 id = get_identifier ("__gcov_execvp");
6356 break;
6357
6358 case BUILT_IN_EXECVE:
6359 id = get_identifier ("__gcov_execve");
6360 break;
6361
6362 default:
298e6adc 6363 gcc_unreachable ();
d1c38823
ZD
6364 }
6365
c2255bc4
AH
6366 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6367 FUNCTION_DECL, id, TREE_TYPE (fn));
d1c38823
ZD
6368 DECL_EXTERNAL (decl) = 1;
6369 TREE_PUBLIC (decl) = 1;
6370 DECL_ARTIFICIAL (decl) = 1;
6371 TREE_NOTHROW (decl) = 1;
ac382b62
JM
6372 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6373 DECL_VISIBILITY_SPECIFIED (decl) = 1;
db3927fb 6374 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
d1c38823 6375 return expand_call (call, target, ignore);
5039610b 6376 }
b8698a0f 6377
48ae6c13
RH
6378
6379\f
02ee605c
RH
6380/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6381 the pointer in these functions is void*, the tree optimizers may remove
6382 casts. The mode computed in expand_builtin isn't reliable either, due
6383 to __sync_bool_compare_and_swap.
6384
6385 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6386 group of builtins. This gives us log2 of the mode size. */
6387
ef4bddc2 6388static inline machine_mode
02ee605c
RH
6389get_builtin_sync_mode (int fcode_diff)
6390{
2de0aa52
HPN
6391 /* The size is not negotiable, so ask not to get BLKmode in return
6392 if the target indicates that a smaller size would be better. */
f4b31647 6393 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
02ee605c
RH
6394}
6395
1387fef3
AS
6396/* Expand the memory expression LOC and return the appropriate memory operand
6397 for the builtin_sync operations. */
6398
6399static rtx
ef4bddc2 6400get_builtin_sync_mem (tree loc, machine_mode mode)
1387fef3
AS
6401{
6402 rtx addr, mem;
b6895597
AS
6403 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6404 ? TREE_TYPE (TREE_TYPE (loc))
6405 : TREE_TYPE (loc));
6406 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
1387fef3 6407
b6895597 6408 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
1413a419 6409 addr = convert_memory_address (addr_mode, addr);
1387fef3
AS
6410
6411 /* Note that we explicitly do not want any alias information for this
6412 memory, so that we kill all other live memories. Otherwise we don't
6413 satisfy the full barrier semantics of the intrinsic. */
b6895597
AS
6414 mem = gen_rtx_MEM (mode, addr);
6415
6416 set_mem_addr_space (mem, addr_space);
6417
6418 mem = validize_mem (mem);
1387fef3 6419
1be38ccb
RG
6420 /* The alignment needs to be at least according to that of the mode. */
6421 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
0eb77834 6422 get_pointer_alignment (loc)));
9cd9e512 6423 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
1387fef3
AS
6424 MEM_VOLATILE_P (mem) = 1;
6425
6426 return mem;
6427}
6428
86951993
AM
6429/* Make sure an argument is in the right mode.
6430 EXP is the tree argument.
6431 MODE is the mode it should be in. */
6432
6433static rtx
ef4bddc2 6434expand_expr_force_mode (tree exp, machine_mode mode)
86951993
AM
6435{
6436 rtx val;
ef4bddc2 6437 machine_mode old_mode;
86951993
AM
6438
6439 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6440 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6441 of CONST_INTs, where we know the old_mode only from the call argument. */
6442
6443 old_mode = GET_MODE (val);
6444 if (old_mode == VOIDmode)
6445 old_mode = TYPE_MODE (TREE_TYPE (exp));
6446 val = convert_modes (mode, old_mode, val, 1);
6447 return val;
6448}
6449
6450
48ae6c13 6451/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5039610b 6452 EXP is the CALL_EXPR. CODE is the rtx code
48ae6c13
RH
6453 that corresponds to the arithmetic or logical operation from the name;
6454 an exception here is that NOT actually means NAND. TARGET is an optional
6455 place for us to store the results; AFTER is true if this is the
86951993 6456 fetch_and_xxx form. */
48ae6c13
RH
6457
6458static rtx
ef4bddc2 6459expand_builtin_sync_operation (machine_mode mode, tree exp,
02ee605c 6460 enum rtx_code code, bool after,
86951993 6461 rtx target)
48ae6c13 6462{
1387fef3 6463 rtx val, mem;
c2255bc4 6464 location_t loc = EXPR_LOCATION (exp);
48ae6c13 6465
23462d4d
UB
6466 if (code == NOT && warn_sync_nand)
6467 {
6468 tree fndecl = get_callee_fndecl (exp);
6469 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6470
6471 static bool warned_f_a_n, warned_n_a_f;
6472
6473 switch (fcode)
6474 {
e0a8ecf2
AM
6475 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6476 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6477 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6478 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6479 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
23462d4d
UB
6480 if (warned_f_a_n)
6481 break;
6482
e79983f4 6483 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
c2255bc4 6484 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
6485 warned_f_a_n = true;
6486 break;
6487
e0a8ecf2
AM
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6490 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6492 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
23462d4d
UB
6493 if (warned_n_a_f)
6494 break;
6495
e79983f4 6496 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
c2255bc4 6497 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
6498 warned_n_a_f = true;
6499 break;
6500
6501 default:
6502 gcc_unreachable ();
6503 }
6504 }
6505
48ae6c13 6506 /* Expand the operands. */
5039610b 6507 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993 6508 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
48ae6c13 6509
46b35980 6510 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
86951993 6511 after);
48ae6c13
RH
6512}
6513
6514/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 6515 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
6516 true if this is the boolean form. TARGET is a place for us to store the
6517 results; this is NOT optional if IS_BOOL is true. */
6518
6519static rtx
ef4bddc2 6520expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 6521 bool is_bool, rtx target)
48ae6c13 6522{
1387fef3 6523 rtx old_val, new_val, mem;
f0409b19 6524 rtx *pbool, *poval;
48ae6c13
RH
6525
6526 /* Expand the operands. */
5039610b 6527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6528 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6529 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 6530
f0409b19
RH
6531 pbool = poval = NULL;
6532 if (target != const0_rtx)
6533 {
6534 if (is_bool)
6535 pbool = &target;
6536 else
6537 poval = &target;
6538 }
6539 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
6540 false, MEMMODEL_SYNC_SEQ_CST,
6541 MEMMODEL_SYNC_SEQ_CST))
86951993 6542 return NULL_RTX;
5039610b 6543
86951993 6544 return target;
48ae6c13
RH
6545}
6546
6547/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6548 general form is actually an atomic exchange, and some targets only
6549 support a reduced form with the second argument being a constant 1.
b8698a0f 6550 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 6551 the results. */
48ae6c13
RH
6552
6553static rtx
ef4bddc2 6554expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 6555 rtx target)
48ae6c13 6556{
1387fef3 6557 rtx val, mem;
48ae6c13
RH
6558
6559 /* Expand the operands. */
5039610b 6560 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6561 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6562
744accb2 6563 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
6564}
6565
6566/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6567
6568static void
ef4bddc2 6569expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
6570{
6571 rtx mem;
6572
6573 /* Expand the operands. */
6574 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6575
46b35980 6576 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
6577}
6578
6579/* Given an integer representing an ``enum memmodel'', verify its
6580 correctness and return the memory model enum. */
6581
6582static enum memmodel
6583get_memmodel (tree exp)
6584{
6585 rtx op;
5dcfdccd 6586 unsigned HOST_WIDE_INT val;
620e594b 6587 location_t loc
8d9fdb49 6588 = expansion_point_location_if_in_system_header (input_location);
86951993
AM
6589
6590 /* If the parameter is not a constant, it's a run time value so we'll just
6591 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6592 if (TREE_CODE (exp) != INTEGER_CST)
6593 return MEMMODEL_SEQ_CST;
6594
6595 op = expand_normal (exp);
5dcfdccd
KY
6596
6597 val = INTVAL (op);
6598 if (targetm.memmodel_check)
6599 val = targetm.memmodel_check (val);
6600 else if (val & ~MEMMODEL_MASK)
6601 {
8d9fdb49
MP
6602 warning_at (loc, OPT_Winvalid_memory_model,
6603 "unknown architecture specifier in memory model to builtin");
5dcfdccd
KY
6604 return MEMMODEL_SEQ_CST;
6605 }
6606
46b35980
AM
6607 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6608 if (memmodel_base (val) >= MEMMODEL_LAST)
86951993 6609 {
8d9fdb49
MP
6610 warning_at (loc, OPT_Winvalid_memory_model,
6611 "invalid memory model argument to builtin");
86951993
AM
6612 return MEMMODEL_SEQ_CST;
6613 }
5dcfdccd 6614
8673b671
AM
6615 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6616 be conservative and promote consume to acquire. */
6617 if (val == MEMMODEL_CONSUME)
6618 val = MEMMODEL_ACQUIRE;
6619
5dcfdccd 6620 return (enum memmodel) val;
86951993
AM
6621}
6622
6623/* Expand the __atomic_exchange intrinsic:
6624 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6625 EXP is the CALL_EXPR.
6626 TARGET is an optional place for us to store the results. */
6627
6628static rtx
ef4bddc2 6629expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
6630{
6631 rtx val, mem;
6632 enum memmodel model;
6633
6634 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
6635
6636 if (!flag_inline_atomics)
6637 return NULL_RTX;
6638
6639 /* Expand the operands. */
6640 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6641 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6642
744accb2 6643 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
6644}
6645
6646/* Expand the __atomic_compare_exchange intrinsic:
6647 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6648 TYPE desired, BOOL weak,
6649 enum memmodel success,
6650 enum memmodel failure)
6651 EXP is the CALL_EXPR.
6652 TARGET is an optional place for us to store the results. */
6653
6654static rtx
ef4bddc2 6655expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
86951993
AM
6656 rtx target)
6657{
58f4cf2a
DM
6658 rtx expect, desired, mem, oldval;
6659 rtx_code_label *label;
86951993
AM
6660 enum memmodel success, failure;
6661 tree weak;
6662 bool is_weak;
620e594b 6663 location_t loc
8d9fdb49 6664 = expansion_point_location_if_in_system_header (input_location);
86951993
AM
6665
6666 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6667 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6668
77df5327
AM
6669 if (failure > success)
6670 {
8d9fdb49
MP
6671 warning_at (loc, OPT_Winvalid_memory_model,
6672 "failure memory model cannot be stronger than success "
6673 "memory model for %<__atomic_compare_exchange%>");
77df5327
AM
6674 success = MEMMODEL_SEQ_CST;
6675 }
6676
46b35980 6677 if (is_mm_release (failure) || is_mm_acq_rel (failure))
86951993 6678 {
8d9fdb49
MP
6679 warning_at (loc, OPT_Winvalid_memory_model,
6680 "invalid failure memory model for "
6681 "%<__atomic_compare_exchange%>");
77df5327
AM
6682 failure = MEMMODEL_SEQ_CST;
6683 success = MEMMODEL_SEQ_CST;
86951993
AM
6684 }
6685
77df5327 6686
86951993
AM
6687 if (!flag_inline_atomics)
6688 return NULL_RTX;
6689
6690 /* Expand the operands. */
6691 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6692
6693 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6694 expect = convert_memory_address (Pmode, expect);
215770ad 6695 expect = gen_rtx_MEM (mode, expect);
86951993
AM
6696 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6697
6698 weak = CALL_EXPR_ARG (exp, 3);
6699 is_weak = false;
9439e9a1 6700 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
86951993
AM
6701 is_weak = true;
6702
672ce939
RH
6703 if (target == const0_rtx)
6704 target = NULL;
672ce939 6705
2fdc29e8
RH
6706 /* Lest the rtl backend create a race condition with an imporoper store
6707 to memory, always create a new pseudo for OLDVAL. */
6708 oldval = NULL;
6709
6710 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
f0409b19 6711 is_weak, success, failure))
86951993
AM
6712 return NULL_RTX;
6713
672ce939
RH
6714 /* Conditionally store back to EXPECT, lest we create a race condition
6715 with an improper store to memory. */
6716 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6717 the normal case where EXPECT is totally private, i.e. a register. At
6718 which point the store can be unconditional. */
6719 label = gen_label_rtx ();
f8940d4a
JG
6720 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6721 GET_MODE (target), 1, label);
672ce939
RH
6722 emit_move_insn (expect, oldval);
6723 emit_label (label);
215770ad 6724
86951993
AM
6725 return target;
6726}
6727
849a76a5
JJ
6728/* Helper function for expand_ifn_atomic_compare_exchange - expand
6729 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6730 call. The weak parameter must be dropped to match the expected parameter
6731 list and the expected argument changed from value to pointer to memory
6732 slot. */
6733
6734static void
6735expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6736{
6737 unsigned int z;
6738 vec<tree, va_gc> *vec;
6739
6740 vec_alloc (vec, 5);
6741 vec->quick_push (gimple_call_arg (call, 0));
6742 tree expected = gimple_call_arg (call, 1);
6743 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6744 TREE_TYPE (expected));
6745 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6746 if (expd != x)
6747 emit_move_insn (x, expd);
6748 tree v = make_tree (TREE_TYPE (expected), x);
6749 vec->quick_push (build1 (ADDR_EXPR,
6750 build_pointer_type (TREE_TYPE (expected)), v));
6751 vec->quick_push (gimple_call_arg (call, 2));
6752 /* Skip the boolean weak parameter. */
6753 for (z = 4; z < 6; z++)
6754 vec->quick_push (gimple_call_arg (call, z));
4871e1ed 6755 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
cf098191 6756 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
4871e1ed 6757 gcc_assert (bytes_log2 < 5);
849a76a5
JJ
6758 built_in_function fncode
6759 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
4871e1ed 6760 + bytes_log2);
849a76a5
JJ
6761 tree fndecl = builtin_decl_explicit (fncode);
6762 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6763 fndecl);
6764 tree exp = build_call_vec (boolean_type_node, fn, vec);
6765 tree lhs = gimple_call_lhs (call);
6766 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6767 if (lhs)
6768 {
6769 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6770 if (GET_MODE (boolret) != mode)
6771 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6772 x = force_reg (mode, x);
6773 write_complex_part (target, boolret, true);
6774 write_complex_part (target, x, false);
6775 }
6776}
6777
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 encodes the access size in its low byte and the weak flag
     in bit 8 (see the 256 mask below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* C11 requires the failure ordering to be no stronger than the success
     ordering; upgrading success to SEQ_CST makes the pair valid again.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  /* Release semantics make no sense on the failure (load-only) path.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  /* Fall back to the library call if no inline CAS sequence exists.  */
  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* Store the {success flag, old value} pair into the complex LHS.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6847
86951993
AM
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   Returns NULL_RTX if inline expansion is disabled (caller emits the
   library call instead).  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  /* Release semantics are meaningless for a pure load.  */
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
6876}
6877
6878
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   Returns NULL_RTX if inline expansion is disabled (caller emits the
   library call instead); otherwise returns the store's result RTX.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* Only relaxed, release and seq_cst make sense for a pure store.  */
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
6910
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NOT encodes
   a NAND, corrected for after the library call below).
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function: temporarily redirect the
     CALL_EXPR's callee to EXT_CALL (e.g. a fetch_op variant with the
     opposite fetch order), expand it, then restore the original callee.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: the library returned the pre/post value of ~(*mem & val);
	     recompute the requested side from it.  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6985
adedd5c1
JJ
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* The optional 4th argument carries an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function onto the rtx op and direct optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: emit a plain atomic fetch-op on the mask
	 (1 << bit), inverted for the AND/reset case.  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the target's dedicated bit-test-and pattern first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: do the fetch-op on the mask and extract the tested bit
     (shifted down to bit 0 if FLAG is one, masked in place otherwise).  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
7069
d660c35e
AM
7070/* Expand an atomic clear operation.
7071 void _atomic_clear (BOOL *obj, enum memmodel)
7072 EXP is the call expression. */
7073
7074static rtx
7075expand_builtin_atomic_clear (tree exp)
7076{
ef4bddc2 7077 machine_mode mode;
d660c35e
AM
7078 rtx mem, ret;
7079 enum memmodel model;
7080
f4b31647 7081 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7082 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7083 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7084
46b35980 7085 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 7086 {
620e594b 7087 location_t loc
8d9fdb49
MP
7088 = expansion_point_location_if_in_system_header (input_location);
7089 warning_at (loc, OPT_Winvalid_memory_model,
7090 "invalid memory model for %<__atomic_store%>");
77df5327 7091 model = MEMMODEL_SEQ_CST;
d660c35e
AM
7092 }
7093
7094 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7095 Failing that, a store is issued by __atomic_store. The only way this can
7096 fail is if the bool type is larger than a word size. Unlikely, but
7097 handle it anyway for completeness. Assume a single threaded model since
7098 there is no atomic support in this case, and no barriers are required. */
7099 ret = expand_atomic_store (mem, const0_rtx, model, true);
7100 if (!ret)
7101 emit_move_insn (mem, const0_rtx);
7102 return const0_rtx;
7103}
7104
7105/* Expand an atomic test_and_set operation.
7106 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7107 EXP is the call expression. */
7108
7109static rtx
744accb2 7110expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 7111{
744accb2 7112 rtx mem;
d660c35e 7113 enum memmodel model;
ef4bddc2 7114 machine_mode mode;
d660c35e 7115
f4b31647 7116 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7117 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7118 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7119
744accb2 7120 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
7121}
7122
7123
86951993
AM
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node / boolean_false_node when the answer is known,
   or NULL_TREE when ARG0 is not a compile-time constant.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  The lowest set bit bounds the
	 guaranteed alignment.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
7194
7195/* Return true if the parameters to call EXP represent an object which will
7196 always generate lock free instructions. The first argument represents the
7197 size of the object, and the second parameter is a pointer to the object
7198 itself. If NULL is passed for the object, then the result is based on
7199 typical alignment for an object of the specified size. Otherwise return
7200 false. */
7201
7202static rtx
7203expand_builtin_atomic_always_lock_free (tree exp)
7204{
7205 tree size;
7206 tree arg0 = CALL_EXPR_ARG (exp, 0);
7207 tree arg1 = CALL_EXPR_ARG (exp, 1);
7208
7209 if (TREE_CODE (arg0) != INTEGER_CST)
7210 {
a9c697b8 7211 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
86951993
AM
7212 return const0_rtx;
7213 }
7214
7215 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 7216 if (size == boolean_true_node)
86951993
AM
7217 return const1_rtx;
7218 return const0_rtx;
7219}
7220
7221/* Return a one or zero if it can be determined that object ARG1 of size ARG
7222 is lock free on this architecture. */
7223
7224static tree
7225fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7226{
7227 if (!flag_inline_atomics)
7228 return NULL_TREE;
7229
7230 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
7231 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7232 return boolean_true_node;
86951993
AM
7233
7234 return NULL_TREE;
7235}
7236
7237/* Return true if the parameters to call EXP represent an object which will
7238 always generate lock free instructions. The first argument represents the
7239 size of the object, and the second parameter is a pointer to the object
7240 itself. If NULL is passed for the object, then the result is based on
7241 typical alignment for an object of the specified size. Otherwise return
7242 NULL*/
7243
7244static rtx
7245expand_builtin_atomic_is_lock_free (tree exp)
7246{
7247 tree size;
7248 tree arg0 = CALL_EXPR_ARG (exp, 0);
7249 tree arg1 = CALL_EXPR_ARG (exp, 1);
7250
7251 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7252 {
a9c697b8 7253 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
86951993
AM
7254 return NULL_RTX;
7255 }
7256
7257 if (!flag_inline_atomics)
7258 return NULL_RTX;
7259
7260 /* If the value is known at compile time, return the RTX for it. */
7261 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 7262 if (size == boolean_true_node)
86951993
AM
7263 return const1_rtx;
7264
7265 return NULL_RTX;
7266}
7267
86951993
AM
7268/* Expand the __atomic_thread_fence intrinsic:
7269 void __atomic_thread_fence (enum memmodel)
7270 EXP is the CALL_EXPR. */
7271
7272static void
7273expand_builtin_atomic_thread_fence (tree exp)
7274{
c39169c8
RH
7275 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7276 expand_mem_thread_fence (model);
86951993
AM
7277}
7278
7279/* Expand the __atomic_signal_fence intrinsic:
7280 void __atomic_signal_fence (enum memmodel)
7281 EXP is the CALL_EXPR. */
7282
7283static void
7284expand_builtin_atomic_signal_fence (tree exp)
7285{
c39169c8
RH
7286 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7287 expand_mem_signal_fence (model);
48ae6c13
RH
7288}
7289
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  /* The legacy __sync barrier is a full sequentially consistent fence.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
7297
f959607b
CLT
/* Expand a call to __builtin_thread_pointer.  EXP is the CALL_EXPR;
   TARGET is an optional place for the result.  Emits the target's
   get_thread_pointer pattern, or errors if the target has none.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("%<__builtin_thread_pointer%> is not supported on this target");
  return const0_rtx;
}
7320
/* Expand a call to __builtin_set_thread_pointer.  EXP is the CALL_EXPR
   whose single pointer argument becomes the new thread pointer.  Emits
   the target's set_thread_pointer pattern, or errors if absent.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      class expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("%<__builtin_set_thread_pointer%> is not supported on this target");
}
7339
28f4ec01 7340\f
862d0b35
DN
/* Emit code to restore the current value of stack.  VAR holds the saved
   stack pointer (as produced by expand_stack_save).  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  /* The stack level changed; let dwarf2cfi/stack tracking know.  */
  record_new_stack_level ();

  /* Fix up REG_ARGS_SIZE notes on the insns just emitted.  */
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
7358
862d0b35
DN
7359/* Emit code to save the current value of stack. */
7360
7361static rtx
7362expand_stack_save (void)
7363{
7364 rtx ret = NULL_RTX;
7365
862d0b35
DN
7366 emit_stack_save (SAVE_BLOCK, &ret);
7367 return ret;
7368}
7369
1f62d637
TV
/* Emit code to get the openacc gang, worker or vector id or size.
   EXP is the CALL_EXPR; TARGET is an optional result location; IGNORE
   is nonzero when the result is unused.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  /* Select the diagnostic name, fallback value for hosts without the
     pattern, and the target pattern generator.  */
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The pattern wants a register output; copy through one if TARGET
     is memory.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
41dbbb37 7438
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   MODE is the mode of the comparison result (the call's int type).

   to: (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load one byte of the variable string and materialize the
	 corresponding constant byte.  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* Keep operand order matching the original call's argument order.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend each byte to the result mode before subtracting, so
	 the difference matches the (int)(unsigned char) semantics.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* After the last byte the result is final; no branch needed.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
7508
/* Inline expansion of a call to str(n)cmp or memcmp, with result going to
   TARGET if that's convenient.
   If the call cannot be inlined, return NULL_RTX.  */
static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither string is a constant string, the call does not qualify.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp/memcmp, the length must be a compile-time constant.  */
  if (is_ncmp)
    {
      if (!tree_fits_uhwi_p (len3_tree))
	return NULL_RTX;
      else
	len3 = tree_to_uhwi (len3_tree);
    }

  /* Recompute the lengths up to and including the terminating NUL.  */
  if (src_str1 != NULL)
    len1 = strnlen (src_str1, len1) + 1;

  if (src_str2 != NULL)
    len2 = strnlen (src_str2, len2) + 1;

  /* Pick the shorter constant string (a zero length means that argument
     is not a constant string at all).  */
  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  /* An explicit count may further bound the comparison.  */
  if (is_ncmp && len3 < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     do nothing.  */
  if (length > (unsigned HOST_WIDE_INT)
	       param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? src_str1 : src_str2, length,
			    const_str_n, mode);
}
7594
425fc685
RE
/* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
   represents the size of the first argument to that call, or VOIDmode
   if the argument is a pointer.  IGNORE will be true if the result
   isn't used.  */
static rtx
expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
			       bool ignore)
{
  rtx val, failsafe;
  unsigned nargs = call_expr_nargs (exp);

  tree arg0 = CALL_EXPR_ARG (exp, 0);

  /* Pointer variant: derive the mode from the pointer argument's type.  */
  if (mode == VOIDmode)
    {
      mode = TYPE_MODE (TREE_TYPE (arg0));
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
    }

  val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);

  /* An optional second argument can be used as a failsafe value on
     some machines.  If it isn't present, then the failsafe value is
     assumed to be 0.  */
  if (nargs > 1)
    {
      tree arg1 = CALL_EXPR_ARG (exp, 1);
      failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
    }
  else
    failsafe = const0_rtx;

  /* If the result isn't used, the behavior is undefined.  It would be
     nice to emit a warning here, but path splitting means this might
     happen with legitimate code.  So simply drop the builtin
     expansion in that case; we've handled any side-effects above.  */
  if (ignore)
    return const0_rtx;

  /* If we don't have a suitable target, create one to hold the result.  */
  if (target == NULL || GET_MODE (target) != mode)
    target = gen_reg_rtx (mode);

  if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
    val = convert_modes (mode, VOIDmode, val, false);

  /* Let the target emit its speculation barrier sequence.  */
  return targetm.speculation_safe_value (mode, target, val, failsafe);
}
7643
28f4ec01
BS
7644/* Expand an expression EXP that calls a built-in function,
7645 with result going to TARGET if that's convenient
7646 (and in mode MODE if that's convenient).
7647 SUBTARGET may be used as the target for computing one of EXP's operands.
7648 IGNORE is nonzero if the value is to be ignored. */
7649
7650rtx
ef4bddc2 7651expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 7652 int ignore)
28f4ec01 7653{
2f503025 7654 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 7655 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 7656 int flags;
28f4ec01 7657
d51151b2
JJ
7658 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7659 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7660
bdea98ca
MO
7661 /* When ASan is enabled, we don't want to expand some memory/string
7662 builtins and rely on libsanitizer's hooks. This allows us to avoid
7663 redundant checks and be sure, that possible overflow will be detected
7664 by ASan. */
7665
4d732405 7666 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
bdea98ca
MO
7667 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7668 return expand_call (exp, target, ignore);
7669
28f4ec01
BS
7670 /* When not optimizing, generate calls to library functions for a certain
7671 set of builtins. */
d25225de 7672 if (!optimize
48ae6c13 7673 && !called_as_built_in (fndecl)
63bf9a90
JH
7674 && fcode != BUILT_IN_FORK
7675 && fcode != BUILT_IN_EXECL
7676 && fcode != BUILT_IN_EXECV
7677 && fcode != BUILT_IN_EXECLP
7678 && fcode != BUILT_IN_EXECLE
7679 && fcode != BUILT_IN_EXECVP
7680 && fcode != BUILT_IN_EXECVE
9e878cf1 7681 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 7682 && fcode != BUILT_IN_FREE)
d25225de 7683 return expand_call (exp, target, ignore);
28f4ec01 7684
0a45ec5c
RS
7685 /* The built-in function expanders test for target == const0_rtx
7686 to determine whether the function's result will be ignored. */
7687 if (ignore)
7688 target = const0_rtx;
7689
7690 /* If the result of a pure or const built-in function is ignored, and
7691 none of its arguments are volatile, we can avoid expanding the
7692 built-in call and just evaluate the arguments for side-effects. */
7693 if (target == const0_rtx
9e3920e9
JJ
7694 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7695 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
7696 {
7697 bool volatilep = false;
7698 tree arg;
5039610b 7699 call_expr_arg_iterator iter;
0a45ec5c 7700
5039610b
SL
7701 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7702 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
7703 {
7704 volatilep = true;
7705 break;
7706 }
7707
7708 if (! volatilep)
7709 {
5039610b
SL
7710 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7711 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
7712 return const0_rtx;
7713 }
7714 }
7715
28f4ec01
BS
7716 switch (fcode)
7717 {
ea6a6627 7718 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 7719 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
7720 case BUILT_IN_FABSD32:
7721 case BUILT_IN_FABSD64:
7722 case BUILT_IN_FABSD128:
5039610b 7723 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 7724 if (target)
c22cacf3 7725 return target;
075ec276
RS
7726 break;
7727
ea6a6627 7728 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 7729 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 7730 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
7731 if (target)
7732 return target;
7733 break;
7734
5906d013
EC
7735 /* Just do a normal library call if we were unable to fold
7736 the values. */
ea6a6627 7737 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 7738 break;
28f4ec01 7739
1b1562a5 7740 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 7741 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
7742 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7743 if (target)
7744 return target;
7745 break;
7746
eaee4464
UB
7747 CASE_FLT_FN (BUILT_IN_ILOGB):
7748 if (! flag_unsafe_math_optimizations)
7749 break;
903c723b
TC
7750 gcc_fallthrough ();
7751 CASE_FLT_FN (BUILT_IN_ISINF):
7752 CASE_FLT_FN (BUILT_IN_FINITE):
7753 case BUILT_IN_ISFINITE:
7754 case BUILT_IN_ISNORMAL:
4359dc2a 7755 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
7756 if (target)
7757 return target;
7758 break;
7759
6c32ee74 7760 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7761 CASE_FLT_FN (BUILT_IN_LCEIL):
7762 CASE_FLT_FN (BUILT_IN_LLCEIL):
7763 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 7764 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 7765 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 7766 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
7767 if (target)
7768 return target;
7769 break;
7770
6c32ee74 7771 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
7772 CASE_FLT_FN (BUILT_IN_LRINT):
7773 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 7774 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
7775 CASE_FLT_FN (BUILT_IN_LROUND):
7776 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 7777 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
7778 if (target)
7779 return target;
7780 break;
7781
ea6a6627 7782 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 7783 target = expand_builtin_powi (exp, target);
17684d46
RG
7784 if (target)
7785 return target;
7786 break;
7787
75c7c595 7788 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 7789 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
7790 gcc_assert (target);
7791 return target;
7792
ea6a6627
VR
7793 CASE_FLT_FN (BUILT_IN_SIN):
7794 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
7795 if (! flag_unsafe_math_optimizations)
7796 break;
7797 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7798 if (target)
7799 return target;
7800 break;
7801
403e54f0
RG
7802 CASE_FLT_FN (BUILT_IN_SINCOS):
7803 if (! flag_unsafe_math_optimizations)
7804 break;
7805 target = expand_builtin_sincos (exp);
7806 if (target)
7807 return target;
7808 break;
7809
28f4ec01
BS
7810 case BUILT_IN_APPLY_ARGS:
7811 return expand_builtin_apply_args ();
7812
7813 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7814 FUNCTION with a copy of the parameters described by
7815 ARGUMENTS, and ARGSIZE. It returns a block of memory
7816 allocated on the stack into which is stored all the registers
7817 that might possibly be used for returning the result of a
7818 function. ARGUMENTS is the value returned by
7819 __builtin_apply_args. ARGSIZE is the number of bytes of
7820 arguments that must be copied. ??? How should this value be
7821 computed? We'll also need a safe worst case value for varargs
7822 functions. */
7823 case BUILT_IN_APPLY:
5039610b 7824 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 7825 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 7826 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 7827 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
7828 return const0_rtx;
7829 else
7830 {
28f4ec01
BS
7831 rtx ops[3];
7832
5039610b
SL
7833 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7834 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7835 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
7836
7837 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7838 }
7839
7840 /* __builtin_return (RESULT) causes the function to return the
7841 value described by RESULT. RESULT is address of the block of
7842 memory returned by __builtin_apply. */
7843 case BUILT_IN_RETURN:
5039610b
SL
7844 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7845 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
7846 return const0_rtx;
7847
7848 case BUILT_IN_SAVEREGS:
d3707adb 7849 return expand_builtin_saveregs ();
28f4ec01 7850
6ef5231b
JJ
7851 case BUILT_IN_VA_ARG_PACK:
7852 /* All valid uses of __builtin_va_arg_pack () are removed during
7853 inlining. */
c94ed7a1 7854 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
7855 return const0_rtx;
7856
ab0e176c
JJ
7857 case BUILT_IN_VA_ARG_PACK_LEN:
7858 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7859 inlining. */
c94ed7a1 7860 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
7861 return const0_rtx;
7862
28f4ec01
BS
7863 /* Return the address of the first anonymous stack arg. */
7864 case BUILT_IN_NEXT_ARG:
5039610b 7865 if (fold_builtin_next_arg (exp, false))
c22cacf3 7866 return const0_rtx;
8870e212 7867 return expand_builtin_next_arg ();
28f4ec01 7868
677feb77
DD
7869 case BUILT_IN_CLEAR_CACHE:
7870 target = expand_builtin___clear_cache (exp);
7871 if (target)
7872 return target;
7873 break;
7874
28f4ec01 7875 case BUILT_IN_CLASSIFY_TYPE:
5039610b 7876 return expand_builtin_classify_type (exp);
28f4ec01
BS
7877
7878 case BUILT_IN_CONSTANT_P:
6de9cd9a 7879 return const0_rtx;
28f4ec01
BS
7880
7881 case BUILT_IN_FRAME_ADDRESS:
7882 case BUILT_IN_RETURN_ADDRESS:
5039610b 7883 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
7884
7885 /* Returns the address of the area where the structure is returned.
7886 0 otherwise. */
7887 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 7888 if (call_expr_nargs (exp) != 0
ca7fd9cd 7889 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 7890 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 7891 return const0_rtx;
28f4ec01 7892 else
ca7fd9cd 7893 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 7894
9e878cf1 7895 CASE_BUILT_IN_ALLOCA:
b7e52782 7896 target = expand_builtin_alloca (exp);
28f4ec01
BS
7897 if (target)
7898 return target;
7899 break;
7900
e3174bdf
MO
7901 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7902 return expand_asan_emit_allocas_unpoison (exp);
7903
6de9cd9a
DN
7904 case BUILT_IN_STACK_SAVE:
7905 return expand_stack_save ();
7906
7907 case BUILT_IN_STACK_RESTORE:
5039610b 7908 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
7909 return const0_rtx;
7910
ac868f29 7911 case BUILT_IN_BSWAP16:
167fa32c
EC
7912 case BUILT_IN_BSWAP32:
7913 case BUILT_IN_BSWAP64:
ac868f29 7914 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
7915 if (target)
7916 return target;
7917 break;
7918
ea6a6627 7919 CASE_INT_FN (BUILT_IN_FFS):
5039610b 7920 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7921 subtarget, ffs_optab);
2928cd7a
RH
7922 if (target)
7923 return target;
7924 break;
7925
ea6a6627 7926 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 7927 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7928 subtarget, clz_optab);
2928cd7a
RH
7929 if (target)
7930 return target;
7931 break;
7932
ea6a6627 7933 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 7934 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7935 subtarget, ctz_optab);
2928cd7a
RH
7936 if (target)
7937 return target;
7938 break;
7939
3801c801 7940 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
7941 target = expand_builtin_unop (target_mode, exp, target,
7942 subtarget, clrsb_optab);
7943 if (target)
7944 return target;
7945 break;
7946
ea6a6627 7947 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 7948 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7949 subtarget, popcount_optab);
2928cd7a
RH
7950 if (target)
7951 return target;
7952 break;
7953
ea6a6627 7954 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 7955 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7956 subtarget, parity_optab);
28f4ec01
BS
7957 if (target)
7958 return target;
7959 break;
7960
7961 case BUILT_IN_STRLEN:
5039610b 7962 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
7963 if (target)
7964 return target;
7965 break;
7966
781ff3d8
MS
7967 case BUILT_IN_STRNLEN:
7968 target = expand_builtin_strnlen (exp, target, target_mode);
7969 if (target)
7970 return target;
7971 break;
7972
ee92e7ba 7973 case BUILT_IN_STRCAT:
b5338fb3 7974 target = expand_builtin_strcat (exp);
ee92e7ba
MS
7975 if (target)
7976 return target;
7977 break;
7978
b5338fb3
MS
7979 case BUILT_IN_GETTEXT:
7980 case BUILT_IN_PUTS:
7981 case BUILT_IN_PUTS_UNLOCKED:
7982 case BUILT_IN_STRDUP:
7983 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7984 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
7985 break;
7986
7987 case BUILT_IN_INDEX:
7988 case BUILT_IN_RINDEX:
7989 case BUILT_IN_STRCHR:
7990 case BUILT_IN_STRRCHR:
7991 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7992 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
7993 break;
7994
7995 case BUILT_IN_FPUTS:
7996 case BUILT_IN_FPUTS_UNLOCKED:
7997 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7998 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
7999 break;
8000
8001 case BUILT_IN_STRNDUP:
8002 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8003 check_nul_terminated_array (exp,
8004 CALL_EXPR_ARG (exp, 0),
8005 CALL_EXPR_ARG (exp, 1));
8006 break;
8007
8008 case BUILT_IN_STRCASECMP:
8009 case BUILT_IN_STRSTR:
8010 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8011 {
8012 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8013 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8014 }
8015 break;
8016
28f4ec01 8017 case BUILT_IN_STRCPY:
44e10129 8018 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
8019 if (target)
8020 return target;
8021 break;
8d51ecf8 8022
ee92e7ba
MS
8023 case BUILT_IN_STRNCAT:
8024 target = expand_builtin_strncat (exp, target);
8025 if (target)
8026 return target;
8027 break;
8028
da9e9f08 8029 case BUILT_IN_STRNCPY:
44e10129 8030 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
8031 if (target)
8032 return target;
8033 break;
8d51ecf8 8034
9cb65f92 8035 case BUILT_IN_STPCPY:
609ae0e2 8036 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
8037 if (target)
8038 return target;
8039 break;
8040
e50d56a5
MS
8041 case BUILT_IN_STPNCPY:
8042 target = expand_builtin_stpncpy (exp, target);
8043 if (target)
8044 return target;
8045 break;
8046
d9c5a8b9
MS
8047 case BUILT_IN_MEMCHR:
8048 target = expand_builtin_memchr (exp, target);
8049 if (target)
8050 return target;
8051 break;
8052
28f4ec01 8053 case BUILT_IN_MEMCPY:
44e10129 8054 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
8055 if (target)
8056 return target;
8057 break;
8058
e50d56a5
MS
8059 case BUILT_IN_MEMMOVE:
8060 target = expand_builtin_memmove (exp, target);
8061 if (target)
8062 return target;
8063 break;
8064
9cb65f92 8065 case BUILT_IN_MEMPCPY:
671a00ee 8066 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
8067 if (target)
8068 return target;
8069 break;
8070
8071 case BUILT_IN_MEMSET:
5039610b 8072 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
8073 if (target)
8074 return target;
8075 break;
8076
e3a709be 8077 case BUILT_IN_BZERO:
8148fe65 8078 target = expand_builtin_bzero (exp);
e3a709be
KG
8079 if (target)
8080 return target;
8081 break;
8082
10a0e2a9 8083 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
8084 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
8085 when changing it to a strcmp call. */
8086 case BUILT_IN_STRCMP_EQ:
8087 target = expand_builtin_memcmp (exp, target, true);
8088 if (target)
8089 return target;
8090
8091 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 8092 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8093 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8094
8095 /* Delete the last parameter. */
8096 unsigned int i;
8097 vec<tree, va_gc> *arg_vec;
8098 vec_alloc (arg_vec, 2);
8099 for (i = 0; i < 2; i++)
8100 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8101 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8102 /* FALLTHROUGH */
8103
28f4ec01 8104 case BUILT_IN_STRCMP:
44e10129 8105 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
8106 if (target)
8107 return target;
8108 break;
8109
8b0b334a
QZ
8110 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8111 back to a BUILT_IN_STRNCMP. */
8112 case BUILT_IN_STRNCMP_EQ:
8113 target = expand_builtin_memcmp (exp, target, true);
8114 if (target)
8115 return target;
8116
8117 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 8118 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8119 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8120 /* FALLTHROUGH */
8121
da9e9f08
KG
8122 case BUILT_IN_STRNCMP:
8123 target = expand_builtin_strncmp (exp, target, mode);
8124 if (target)
8125 return target;
8126 break;
8127
4b2a62db 8128 case BUILT_IN_BCMP:
28f4ec01 8129 case BUILT_IN_MEMCMP:
36b85e43
BS
8130 case BUILT_IN_MEMCMP_EQ:
8131 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
8132 if (target)
8133 return target;
36b85e43
BS
8134 if (fcode == BUILT_IN_MEMCMP_EQ)
8135 {
8136 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8137 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8138 }
28f4ec01 8139 break;
28f4ec01
BS
8140
8141 case BUILT_IN_SETJMP:
903c723b 8142 /* This should have been lowered to the builtins below. */
4f6c2131
EB
8143 gcc_unreachable ();
8144
8145 case BUILT_IN_SETJMP_SETUP:
8146 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8147 and the receiver label. */
5039610b 8148 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 8149 {
5039610b 8150 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 8151 VOIDmode, EXPAND_NORMAL);
5039610b 8152 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 8153 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8154
8155 /* This is copied from the handling of non-local gotos. */
8156 expand_builtin_setjmp_setup (buf_addr, label_r);
8157 nonlocal_goto_handler_labels
b5241a5a 8158 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
8159 nonlocal_goto_handler_labels);
8160 /* ??? Do not let expand_label treat us as such since we would
8161 not want to be both on the list of non-local labels and on
8162 the list of forced labels. */
8163 FORCED_LABEL (label) = 0;
8164 return const0_rtx;
8165 }
8166 break;
8167
4f6c2131
EB
8168 case BUILT_IN_SETJMP_RECEIVER:
8169 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 8170 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 8171 {
5039610b 8172 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 8173 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8174
8175 expand_builtin_setjmp_receiver (label_r);
8176 return const0_rtx;
8177 }
250d07b6 8178 break;
28f4ec01
BS
8179
8180 /* __builtin_longjmp is passed a pointer to an array of five words.
8181 It's similar to the C library longjmp function but works with
8182 __builtin_setjmp above. */
8183 case BUILT_IN_LONGJMP:
5039610b 8184 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 8185 {
5039610b 8186 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 8187 VOIDmode, EXPAND_NORMAL);
5039610b 8188 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
8189
8190 if (value != const1_rtx)
8191 {
9e637a26 8192 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
8193 return const0_rtx;
8194 }
8195
8196 expand_builtin_longjmp (buf_addr, value);
8197 return const0_rtx;
8198 }
4f6c2131 8199 break;
28f4ec01 8200
6de9cd9a 8201 case BUILT_IN_NONLOCAL_GOTO:
5039610b 8202 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
8203 if (target)
8204 return target;
8205 break;
8206
2b92e7f5
RK
8207 /* This updates the setjmp buffer that is its argument with the value
8208 of the current stack pointer. */
8209 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 8210 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
8211 {
8212 rtx buf_addr
5039610b 8213 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
8214
8215 expand_builtin_update_setjmp_buf (buf_addr);
8216 return const0_rtx;
8217 }
8218 break;
8219
28f4ec01 8220 case BUILT_IN_TRAP:
9602f5a0 8221 expand_builtin_trap ();
28f4ec01
BS
8222 return const0_rtx;
8223
468059bc
DD
8224 case BUILT_IN_UNREACHABLE:
8225 expand_builtin_unreachable ();
8226 return const0_rtx;
8227
ea6a6627 8228 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
8229 case BUILT_IN_SIGNBITD32:
8230 case BUILT_IN_SIGNBITD64:
8231 case BUILT_IN_SIGNBITD128:
ef79730c
RS
8232 target = expand_builtin_signbit (exp, target);
8233 if (target)
8234 return target;
8235 break;
8236
28f4ec01
BS
8237 /* Various hooks for the DWARF 2 __throw routine. */
8238 case BUILT_IN_UNWIND_INIT:
8239 expand_builtin_unwind_init ();
8240 return const0_rtx;
8241 case BUILT_IN_DWARF_CFA:
8242 return virtual_cfa_rtx;
8243#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
8244 case BUILT_IN_DWARF_SP_COLUMN:
8245 return expand_builtin_dwarf_sp_column ();
d9d5c9de 8246 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 8247 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 8248 return const0_rtx;
28f4ec01
BS
8249#endif
8250 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 8251 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8252 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 8253 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8254 case BUILT_IN_EH_RETURN:
5039610b
SL
8255 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8256 CALL_EXPR_ARG (exp, 1));
28f4ec01 8257 return const0_rtx;
52a11cbf 8258 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 8259 return expand_builtin_eh_return_data_regno (exp);
c76362b4 8260 case BUILT_IN_EXTEND_POINTER:
5039610b 8261 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
8262 case BUILT_IN_EH_POINTER:
8263 return expand_builtin_eh_pointer (exp);
8264 case BUILT_IN_EH_FILTER:
8265 return expand_builtin_eh_filter (exp);
8266 case BUILT_IN_EH_COPY_VALUES:
8267 return expand_builtin_eh_copy_values (exp);
c76362b4 8268
6c535c69 8269 case BUILT_IN_VA_START:
5039610b 8270 return expand_builtin_va_start (exp);
d3707adb 8271 case BUILT_IN_VA_END:
5039610b 8272 return expand_builtin_va_end (exp);
d3707adb 8273 case BUILT_IN_VA_COPY:
5039610b 8274 return expand_builtin_va_copy (exp);
994a57cd 8275 case BUILT_IN_EXPECT:
5039610b 8276 return expand_builtin_expect (exp, target);
1e9168b2
ML
8277 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8278 return expand_builtin_expect_with_probability (exp, target);
45d439ac
JJ
8279 case BUILT_IN_ASSUME_ALIGNED:
8280 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 8281 case BUILT_IN_PREFETCH:
5039610b 8282 expand_builtin_prefetch (exp);
a9ccbb60
JJ
8283 return const0_rtx;
8284
6de9cd9a 8285 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
8286 return expand_builtin_init_trampoline (exp, true);
8287 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8288 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 8289 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 8290 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 8291
4c640e26
EB
8292 case BUILT_IN_INIT_DESCRIPTOR:
8293 return expand_builtin_init_descriptor (exp);
8294 case BUILT_IN_ADJUST_DESCRIPTOR:
8295 return expand_builtin_adjust_descriptor (exp);
8296
d1c38823
ZD
8297 case BUILT_IN_FORK:
8298 case BUILT_IN_EXECL:
8299 case BUILT_IN_EXECV:
8300 case BUILT_IN_EXECLP:
8301 case BUILT_IN_EXECLE:
8302 case BUILT_IN_EXECVP:
8303 case BUILT_IN_EXECVE:
5039610b 8304 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
8305 if (target)
8306 return target;
8307 break;
28f4ec01 8308
e0a8ecf2
AM
8309 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8310 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8311 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8312 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8313 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8314 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 8315 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
8316 if (target)
8317 return target;
8318 break;
8319
e0a8ecf2
AM
8320 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8321 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8322 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8323 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8324 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8325 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 8326 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
8327 if (target)
8328 return target;
8329 break;
8330
e0a8ecf2
AM
8331 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8332 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8333 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8334 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8335 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8336 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 8337 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
8338 if (target)
8339 return target;
8340 break;
8341
e0a8ecf2
AM
8342 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8343 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8344 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8345 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8346 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8347 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 8348 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
8349 if (target)
8350 return target;
8351 break;
8352
e0a8ecf2
AM
8353 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8354 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8355 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8356 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8357 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8358 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 8359 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
8360 if (target)
8361 return target;
8362 break;
8363
e0a8ecf2
AM
8364 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8365 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8366 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8367 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8368 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8369 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 8370 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
8371 if (target)
8372 return target;
8373 break;
8374
e0a8ecf2
AM
8375 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8376 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8377 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8378 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8379 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8380 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 8381 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
8382 if (target)
8383 return target;
8384 break;
8385
e0a8ecf2
AM
8386 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8387 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8388 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8389 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8390 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8391 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 8392 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
8393 if (target)
8394 return target;
8395 break;
8396
e0a8ecf2
AM
8397 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8398 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8399 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8400 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8401 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8402 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 8403 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
8404 if (target)
8405 return target;
8406 break;
8407
e0a8ecf2
AM
8408 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8409 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8410 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8411 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8412 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8413 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 8414 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
8415 if (target)
8416 return target;
8417 break;
8418
e0a8ecf2
AM
8419 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8420 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8421 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8422 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8423 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8424 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 8425 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
8426 if (target)
8427 return target;
8428 break;
8429
e0a8ecf2
AM
8430 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8431 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8432 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8433 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8434 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8435 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 8436 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
8437 if (target)
8438 return target;
8439 break;
8440
e0a8ecf2
AM
8441 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8442 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8443 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8444 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8445 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
8446 if (mode == VOIDmode)
8447 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
8448 if (!target || !register_operand (target, mode))
8449 target = gen_reg_rtx (mode);
02ee605c 8450
e0a8ecf2
AM
8451 mode = get_builtin_sync_mode
8452 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 8453 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
8454 if (target)
8455 return target;
8456 break;
8457
e0a8ecf2
AM
8458 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8459 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8460 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8461 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8462 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8463 mode = get_builtin_sync_mode
8464 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 8465 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
8466 if (target)
8467 return target;
8468 break;
8469
e0a8ecf2
AM
8470 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8471 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8472 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8473 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8474 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8476 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
8477 if (target)
8478 return target;
8479 break;
8480
e0a8ecf2
AM
8481 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8482 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8483 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8484 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8485 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8487 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
8488 return const0_rtx;
8489
e0a8ecf2
AM
8490 case BUILT_IN_SYNC_SYNCHRONIZE:
8491 expand_builtin_sync_synchronize ();
48ae6c13
RH
8492 return const0_rtx;
8493
86951993
AM
8494 case BUILT_IN_ATOMIC_EXCHANGE_1:
8495 case BUILT_IN_ATOMIC_EXCHANGE_2:
8496 case BUILT_IN_ATOMIC_EXCHANGE_4:
8497 case BUILT_IN_ATOMIC_EXCHANGE_8:
8498 case BUILT_IN_ATOMIC_EXCHANGE_16:
8499 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8500 target = expand_builtin_atomic_exchange (mode, exp, target);
8501 if (target)
8502 return target;
8503 break;
8504
8505 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8506 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8507 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8508 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8509 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
8510 {
8511 unsigned int nargs, z;
9771b263 8512 vec<tree, va_gc> *vec;
e351ae85
AM
8513
8514 mode =
8515 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8516 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8517 if (target)
8518 return target;
8519
8520 /* If this is turned into an external library call, the weak parameter
8521 must be dropped to match the expected parameter list. */
8522 nargs = call_expr_nargs (exp);
9771b263 8523 vec_alloc (vec, nargs - 1);
e351ae85 8524 for (z = 0; z < 3; z++)
9771b263 8525 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8526 /* Skip the boolean weak parameter. */
8527 for (z = 4; z < 6; z++)
9771b263 8528 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8529 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8530 break;
8531 }
86951993
AM
8532
8533 case BUILT_IN_ATOMIC_LOAD_1:
8534 case BUILT_IN_ATOMIC_LOAD_2:
8535 case BUILT_IN_ATOMIC_LOAD_4:
8536 case BUILT_IN_ATOMIC_LOAD_8:
8537 case BUILT_IN_ATOMIC_LOAD_16:
8538 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8539 target = expand_builtin_atomic_load (mode, exp, target);
8540 if (target)
8541 return target;
8542 break;
8543
8544 case BUILT_IN_ATOMIC_STORE_1:
8545 case BUILT_IN_ATOMIC_STORE_2:
8546 case BUILT_IN_ATOMIC_STORE_4:
8547 case BUILT_IN_ATOMIC_STORE_8:
8548 case BUILT_IN_ATOMIC_STORE_16:
8549 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8550 target = expand_builtin_atomic_store (mode, exp);
8551 if (target)
8552 return const0_rtx;
8553 break;
8554
8555 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8556 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8557 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8558 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8559 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8560 {
8561 enum built_in_function lib;
8562 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8563 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8564 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8565 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8566 ignore, lib);
8567 if (target)
8568 return target;
8569 break;
8570 }
8571 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8572 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8573 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8574 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8575 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8576 {
8577 enum built_in_function lib;
8578 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8579 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8580 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8581 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8582 ignore, lib);
8583 if (target)
8584 return target;
8585 break;
8586 }
8587 case BUILT_IN_ATOMIC_AND_FETCH_1:
8588 case BUILT_IN_ATOMIC_AND_FETCH_2:
8589 case BUILT_IN_ATOMIC_AND_FETCH_4:
8590 case BUILT_IN_ATOMIC_AND_FETCH_8:
8591 case BUILT_IN_ATOMIC_AND_FETCH_16:
8592 {
8593 enum built_in_function lib;
8594 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8595 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8596 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8597 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8598 ignore, lib);
8599 if (target)
8600 return target;
8601 break;
8602 }
8603 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8604 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8605 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8606 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8607 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8608 {
8609 enum built_in_function lib;
8610 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8611 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8612 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8613 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8614 ignore, lib);
8615 if (target)
8616 return target;
8617 break;
8618 }
8619 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8620 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8621 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8622 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8623 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8624 {
8625 enum built_in_function lib;
8626 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8627 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8628 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8629 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8630 ignore, lib);
8631 if (target)
8632 return target;
8633 break;
8634 }
8635 case BUILT_IN_ATOMIC_OR_FETCH_1:
8636 case BUILT_IN_ATOMIC_OR_FETCH_2:
8637 case BUILT_IN_ATOMIC_OR_FETCH_4:
8638 case BUILT_IN_ATOMIC_OR_FETCH_8:
8639 case BUILT_IN_ATOMIC_OR_FETCH_16:
8640 {
8641 enum built_in_function lib;
8642 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8643 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8644 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8645 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8646 ignore, lib);
8647 if (target)
8648 return target;
8649 break;
8650 }
8651 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8652 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8653 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8654 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8655 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8656 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8657 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8658 ignore, BUILT_IN_NONE);
8659 if (target)
8660 return target;
8661 break;
8662
8663 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8664 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8665 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8666 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8667 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8668 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8669 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8670 ignore, BUILT_IN_NONE);
8671 if (target)
8672 return target;
8673 break;
8674
8675 case BUILT_IN_ATOMIC_FETCH_AND_1:
8676 case BUILT_IN_ATOMIC_FETCH_AND_2:
8677 case BUILT_IN_ATOMIC_FETCH_AND_4:
8678 case BUILT_IN_ATOMIC_FETCH_AND_8:
8679 case BUILT_IN_ATOMIC_FETCH_AND_16:
8680 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8681 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8682 ignore, BUILT_IN_NONE);
8683 if (target)
8684 return target;
8685 break;
8686
8687 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8688 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8689 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8690 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8691 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8692 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8693 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8694 ignore, BUILT_IN_NONE);
8695 if (target)
8696 return target;
8697 break;
8698
8699 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8700 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8701 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8702 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8703 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8704 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8705 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8706 ignore, BUILT_IN_NONE);
8707 if (target)
8708 return target;
8709 break;
8710
8711 case BUILT_IN_ATOMIC_FETCH_OR_1:
8712 case BUILT_IN_ATOMIC_FETCH_OR_2:
8713 case BUILT_IN_ATOMIC_FETCH_OR_4:
8714 case BUILT_IN_ATOMIC_FETCH_OR_8:
8715 case BUILT_IN_ATOMIC_FETCH_OR_16:
8716 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8717 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8718 ignore, BUILT_IN_NONE);
8719 if (target)
8720 return target;
8721 break;
d660c35e
AM
8722
8723 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 8724 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
8725
8726 case BUILT_IN_ATOMIC_CLEAR:
8727 return expand_builtin_atomic_clear (exp);
86951993
AM
8728
8729 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8730 return expand_builtin_atomic_always_lock_free (exp);
8731
8732 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8733 target = expand_builtin_atomic_is_lock_free (exp);
8734 if (target)
8735 return target;
8736 break;
8737
8738 case BUILT_IN_ATOMIC_THREAD_FENCE:
8739 expand_builtin_atomic_thread_fence (exp);
8740 return const0_rtx;
8741
8742 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8743 expand_builtin_atomic_signal_fence (exp);
8744 return const0_rtx;
8745
10a0d495
JJ
8746 case BUILT_IN_OBJECT_SIZE:
8747 return expand_builtin_object_size (exp);
8748
8749 case BUILT_IN_MEMCPY_CHK:
8750 case BUILT_IN_MEMPCPY_CHK:
8751 case BUILT_IN_MEMMOVE_CHK:
8752 case BUILT_IN_MEMSET_CHK:
8753 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8754 if (target)
8755 return target;
8756 break;
8757
8758 case BUILT_IN_STRCPY_CHK:
8759 case BUILT_IN_STPCPY_CHK:
8760 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 8761 case BUILT_IN_STPNCPY_CHK:
10a0d495 8762 case BUILT_IN_STRCAT_CHK:
1c2fc017 8763 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
8764 case BUILT_IN_SNPRINTF_CHK:
8765 case BUILT_IN_VSNPRINTF_CHK:
8766 maybe_emit_chk_warning (exp, fcode);
8767 break;
8768
8769 case BUILT_IN_SPRINTF_CHK:
8770 case BUILT_IN_VSPRINTF_CHK:
8771 maybe_emit_sprintf_chk_warning (exp, fcode);
8772 break;
8773
f9555f40 8774 case BUILT_IN_FREE:
a3a704a4
MH
8775 if (warn_free_nonheap_object)
8776 maybe_emit_free_warning (exp);
f9555f40
JJ
8777 break;
8778
f959607b
CLT
8779 case BUILT_IN_THREAD_POINTER:
8780 return expand_builtin_thread_pointer (exp, target);
8781
8782 case BUILT_IN_SET_THREAD_POINTER:
8783 expand_builtin_set_thread_pointer (exp);
8784 return const0_rtx;
8785
41dbbb37 8786 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
8787 /* Do library call, if we failed to expand the builtin when
8788 folding. */
41dbbb37
TS
8789 break;
8790
1f62d637
TV
8791 case BUILT_IN_GOACC_PARLEVEL_ID:
8792 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8793 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8794
425fc685
RE
8795 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8796 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8797
8798 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8799 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8800 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8801 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8802 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8803 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8804 return expand_speculation_safe_value (mode, exp, target, ignore);
8805
e62f4abc 8806 default: /* just do library call, if unknown builtin */
84b8b0e0 8807 break;
28f4ec01
BS
8808 }
8809
8810 /* The switch statement above can drop through to cause the function
8811 to be called normally. */
8812 return expand_call (exp, target, ignore);
8813}
b0b3afb2 8814
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls to "normal" (non-machine-specific) built-ins
     are recognized.  */
  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lock-step, requiring each argument to be in the same type class
     (float, complex float, pointer, integral) as its parameter.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Reaching the `void' sentinel with arguments left over
	     means the call passed too many arguments.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments for the declared parameters.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	/* A parameter type class we don't know how to match.  */
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
8882
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node, integer_zero_node,
   or NULL_TREE when the answer is not yet known (so the call is kept
   for a later folding pass).  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal (or of its first element via
	 a zero-index ARRAY_REF) also counts as constant.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Otherwise the answer may still become known later; keep the call.  */
  return NULL_TREE;
}
8926
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* A non-null PROBABILITY selects __builtin_expect_with_probability;
     otherwise plain __builtin_expect is used.  */
  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  /* Convert the actual arguments to the built-in's declared
     parameter types.  */
  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  /* Wrap the call as a truthvalue: CALL != 0.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
8958
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; evaluate it only once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address is not a compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
9031
5039610b 9032/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 9033
ad82abb8 9034static tree
5039610b 9035fold_builtin_classify_type (tree arg)
ad82abb8 9036{
5039610b 9037 if (arg == 0)
45a2c477 9038 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 9039
45a2c477 9040 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
9041}
9042
/* Fold a call to __builtin_strlen with argument ARG.  TYPE is the
   call's result type.  Returns the known length converted to TYPE,
   or NULL_TREE (possibly after diagnosing an unterminated array).  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data lendata = { };
      tree len = c_strlen (arg, 0, &lendata);

      if (len)
	return fold_convert_loc (loc, type, len);

      /* The strict query above found no length; retry in mode 1 just
	 to populate LENDATA.DECL for diagnostics.  */
      if (!lendata.decl)
	c_strlen (arg, 1, &lendata);

      if (lendata.decl)
	{
	  /* Prefer the argument's own location for the warning,
	     falling back to the statement or input location.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
	}

      return NULL_TREE;
    }
}
9073
ab5e2615
RH
9074/* Fold a call to __builtin_inf or __builtin_huge_val. */
9075
9076static tree
db3927fb 9077fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 9078{
efdc7e19
RH
9079 REAL_VALUE_TYPE real;
9080
6d84156b
JM
9081 /* __builtin_inff is intended to be usable to define INFINITY on all
9082 targets. If an infinity is not available, INFINITY expands "to a
9083 positive constant of type float that overflows at translation
9084 time", footnote "In this case, using INFINITY will violate the
9085 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9086 Thus we pedwarn to ensure this constraint violation is
9087 diagnosed. */
ab5e2615 9088 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 9089 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 9090
efdc7e19
RH
9091 real_inf (&real);
9092 return build_real (type, real);
ab5e2615
RH
9093}
9094
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  /* ARG0 is the angle; ARG1 and ARG2 are the sine and cosine output
     pointers respectively.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Constant folding didn't produce a value; emit a real cexpi
	 call, which is only valid when the C99 complex math library
	 is available and the built-in is implicitly usable.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* CALL is referenced twice below; evaluate it only once.  */
      call = builtin_save_expr (call);
    }

  /* Store the imaginary part (sine) through ARG1 and the real part
     (cosine) through ARG2.  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
9144
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  The omit_* helpers
     keep ARG1/ARG2 in the tree so their side effects still occur.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      /* Load the first byte through each pointer, widened to int.  */
      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9190
5039610b 9191/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
9192
9193static tree
db3927fb 9194fold_builtin_isascii (location_t loc, tree arg)
df0785d6 9195{
5039610b
SL
9196 if (!validate_arg (arg, INTEGER_TYPE))
9197 return NULL_TREE;
df0785d6
KG
9198 else
9199 {
9200 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 9201 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9202 build_int_cst (integer_type_node,
6728ee79 9203 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 9204 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 9205 arg, integer_zero_node);
df0785d6
KG
9206 }
9207}
9208
5039610b 9209/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
9210
9211static tree
db3927fb 9212fold_builtin_toascii (location_t loc, tree arg)
df0785d6 9213{
5039610b
SL
9214 if (!validate_arg (arg, INTEGER_TYPE))
9215 return NULL_TREE;
b8698a0f 9216
5039610b 9217 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 9218 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9219 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
9220}
9221
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      /* A zero result means '0' could not be mapped to the target
	 character set; give up on folding.  */
      if (target_digit0 == 0)
	return NULL_TREE;

      /* The unsigned subtraction makes one comparison cover both the
	 below-'0' and above-'9' cases via wraparound.  */
      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
ef79730c 9247
5039610b 9248/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
9249
9250static tree
db3927fb 9251fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 9252{
5039610b
SL
9253 if (!validate_arg (arg, REAL_TYPE))
9254 return NULL_TREE;
9655d83b 9255
db3927fb 9256 arg = fold_convert_loc (loc, type, arg);
db3927fb 9257 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9258}
9259
5039610b 9260/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
9261
9262static tree
db3927fb 9263fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 9264{
5039610b
SL
9265 if (!validate_arg (arg, INTEGER_TYPE))
9266 return NULL_TREE;
9655d83b 9267
db3927fb 9268 arg = fold_convert_loc (loc, type, arg);
db3927fb 9269 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9270}
9271
527cab20
KG
9272/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9273
9274static tree
db3927fb 9275fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 9276{
c128599a
KG
9277 if (validate_arg (arg, COMPLEX_TYPE)
9278 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
9279 {
9280 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 9281
527cab20
KG
9282 if (atan2_fn)
9283 {
5039610b 9284 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
9285 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9286 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9287 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
9288 }
9289 }
b8698a0f 9290
527cab20
KG
9291 return NULL_TREE;
9292}
9293
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value to decompose, ARG1 the int* the exponent is stored through,
   and RETTYPE the real type of the result.  Returns the folded tree or
   NULL_TREE.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a real constant that did not overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9349
3d577eaf
KG
9350/* Fold a call to builtin modf. */
9351
9352static tree
db3927fb 9353fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
9354{
9355 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9356 return NULL_TREE;
b8698a0f 9357
3d577eaf 9358 STRIP_NOPS (arg0);
b8698a0f 9359
3d577eaf
KG
9360 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9361 return NULL_TREE;
b8698a0f 9362
db3927fb 9363 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
9364
9365 /* Proceed if a valid pointer type was passed in. */
9366 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9367 {
9368 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9369 REAL_VALUE_TYPE trunc, frac;
9370
9371 switch (value->cl)
9372 {
9373 case rvc_nan:
9374 case rvc_zero:
9375 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9376 trunc = frac = *value;
9377 break;
9378 case rvc_inf:
9379 /* For +-Inf, return (*arg1 = arg0, +-0). */
9380 frac = dconst0;
9381 frac.sign = value->sign;
9382 trunc = *value;
9383 break;
9384 case rvc_normal:
9385 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9386 real_trunc (&trunc, VOIDmode, value);
9387 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9388 /* If the original number was negative and already
9389 integral, then the fractional part is -0.0. */
9390 if (value->sign && frac.cl == rvc_zero)
9391 frac.sign = value->sign;
9392 break;
9393 }
b8698a0f 9394
3d577eaf 9395 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 9396 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
9397 build_real (rettype, trunc));
9398 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 9399 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
9400 build_real (rettype, frac));
9401 }
b8698a0f 9402
3d577eaf
KG
9403 return NULL_TREE;
9404}
9405
903c723b
TC
9406/* Given a location LOC, an interclass builtin function decl FNDECL
9407 and its single argument ARG, return an folded expression computing
9408 the same, or NULL_TREE if we either couldn't or didn't want to fold
9409 (the latter happen if there's an RTL instruction available). */
9410
9411static tree
9412fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9413{
9414 machine_mode mode;
9415
9416 if (!validate_arg (arg, REAL_TYPE))
9417 return NULL_TREE;
9418
9419 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9420 return NULL_TREE;
9421
9422 mode = TYPE_MODE (TREE_TYPE (arg));
9423
9424 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
02cf2861 9425
903c723b
TC
9426 /* If there is no optab, try generic code. */
9427 switch (DECL_FUNCTION_CODE (fndecl))
9428 {
9429 tree result;
44e10129 9430
903c723b
TC
9431 CASE_FLT_FN (BUILT_IN_ISINF):
9432 {
9433 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9434 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9435 tree type = TREE_TYPE (arg);
9436 REAL_VALUE_TYPE r;
9437 char buf[128];
9438
9439 if (is_ibm_extended)
9440 {
9441 /* NaN and Inf are encoded in the high-order double value
9442 only. The low-order value is not significant. */
9443 type = double_type_node;
9444 mode = DFmode;
9445 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9446 }
00be2a5f 9447 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
903c723b
TC
9448 real_from_string (&r, buf);
9449 result = build_call_expr (isgr_fn, 2,
9450 fold_build1_loc (loc, ABS_EXPR, type, arg),
9451 build_real (type, r));
9452 return result;
9453 }
9454 CASE_FLT_FN (BUILT_IN_FINITE):
9455 case BUILT_IN_ISFINITE:
9456 {
9457 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9458 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9459 tree type = TREE_TYPE (arg);
9460 REAL_VALUE_TYPE r;
9461 char buf[128];
9462
9463 if (is_ibm_extended)
9464 {
9465 /* NaN and Inf are encoded in the high-order double value
9466 only. The low-order value is not significant. */
9467 type = double_type_node;
9468 mode = DFmode;
9469 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9470 }
00be2a5f 9471 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
903c723b
TC
9472 real_from_string (&r, buf);
9473 result = build_call_expr (isle_fn, 2,
9474 fold_build1_loc (loc, ABS_EXPR, type, arg),
9475 build_real (type, r));
9476 /*result = fold_build2_loc (loc, UNGT_EXPR,
9477 TREE_TYPE (TREE_TYPE (fndecl)),
9478 fold_build1_loc (loc, ABS_EXPR, type, arg),
9479 build_real (type, r));
9480 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9481 TREE_TYPE (TREE_TYPE (fndecl)),
9482 result);*/
9483 return result;
9484 }
9485 case BUILT_IN_ISNORMAL:
9486 {
9487 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9488 islessequal(fabs(x),DBL_MAX). */
9489 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9490 tree type = TREE_TYPE (arg);
9491 tree orig_arg, max_exp, min_exp;
9492 machine_mode orig_mode = mode;
9493 REAL_VALUE_TYPE rmax, rmin;
9494 char buf[128];
9495
9496 orig_arg = arg = builtin_save_expr (arg);
9497 if (is_ibm_extended)
9498 {
9499 /* Use double to test the normal range of IBM extended
9500 precision. Emin for IBM extended precision is
9501 different to emin for IEEE double, being 53 higher
9502 since the low double exponent is at least 53 lower
9503 than the high double exponent. */
9504 type = double_type_node;
9505 mode = DFmode;
9506 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9507 }
9508 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9509
00be2a5f 9510 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
903c723b
TC
9511 real_from_string (&rmax, buf);
9512 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9513 real_from_string (&rmin, buf);
9514 max_exp = build_real (type, rmax);
9515 min_exp = build_real (type, rmin);
9516
9517 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9518 if (is_ibm_extended)
9519 {
9520 /* Testing the high end of the range is done just using
9521 the high double, using the same test as isfinite().
9522 For the subnormal end of the range we first test the
9523 high double, then if its magnitude is equal to the
9524 limit of 0x1p-969, we test whether the low double is
9525 non-zero and opposite sign to the high double. */
9526 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9527 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9528 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9529 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9530 arg, min_exp);
9531 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9532 complex_double_type_node, orig_arg);
9533 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9534 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9535 tree zero = build_real (type, dconst0);
9536 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9537 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9538 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9539 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9540 fold_build3 (COND_EXPR,
9541 integer_type_node,
9542 hilt, logt, lolt));
9543 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9544 eq_min, ok_lo);
9545 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9546 gt_min, eq_min);
9547 }
9548 else
9549 {
9550 tree const isge_fn
9551 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9552 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9553 }
9554 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9555 max_exp, min_exp);
9556 return result;
9557 }
9558 default:
9559 break;
9560 }
9561
9562 return NULL_TREE;
9563}
9564
9565/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
5039610b 9566 ARG is the argument for the call. */
64a9295a
PB
9567
9568static tree
903c723b 9569fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
64a9295a 9570{
903c723b
TC
9571 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9572
5039610b 9573 if (!validate_arg (arg, REAL_TYPE))
83322951 9574 return NULL_TREE;
64a9295a 9575
64a9295a
PB
9576 switch (builtin_index)
9577 {
903c723b
TC
9578 case BUILT_IN_ISINF:
9579 if (!HONOR_INFINITIES (arg))
9580 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9581
9582 return NULL_TREE;
9583
05f41289
KG
9584 case BUILT_IN_ISINF_SIGN:
9585 {
9586 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9587 /* In a boolean context, GCC will fold the inner COND_EXPR to
9588 1. So e.g. "if (isinf_sign(x))" would be folded to just
9589 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
72f52f30 9590 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
e79983f4 9591 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
05f41289
KG
9592 tree tmp = NULL_TREE;
9593
9594 arg = builtin_save_expr (arg);
9595
9596 if (signbit_fn && isinf_fn)
9597 {
db3927fb
AH
9598 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9599 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
05f41289 9600
db3927fb 9601 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 9602 signbit_call, integer_zero_node);
db3927fb 9603 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 9604 isinf_call, integer_zero_node);
b8698a0f 9605
db3927fb 9606 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
05f41289 9607 integer_minus_one_node, integer_one_node);
db3927fb
AH
9608 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9609 isinf_call, tmp,
05f41289
KG
9610 integer_zero_node);
9611 }
9612
9613 return tmp;
9614 }
9615
903c723b
TC
9616 case BUILT_IN_ISFINITE:
9617 if (!HONOR_NANS (arg)
9618 && !HONOR_INFINITIES (arg))
9619 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9620
9621 return NULL_TREE;
9622
9623 case BUILT_IN_ISNAN:
9624 if (!HONOR_NANS (arg))
9625 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9626
9627 {
9628 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9629 if (is_ibm_extended)
9630 {
9631 /* NaN and Inf are encoded in the high-order double value
9632 only. The low-order value is not significant. */
9633 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9634 }
9635 }
9636 arg = builtin_save_expr (arg);
9637 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9638
64a9295a 9639 default:
298e6adc 9640 gcc_unreachable ();
64a9295a
PB
9641 }
9642}
9643
903c723b
TC
9644/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9645 This builtin will generate code to return the appropriate floating
9646 point classification depending on the value of the floating point
9647 number passed in. The possible return values must be supplied as
9648 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9649 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9650 one floating point argument which is "type generic". */
9651
9652static tree
9653fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9654{
9655 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9656 arg, type, res, tmp;
9657 machine_mode mode;
9658 REAL_VALUE_TYPE r;
9659 char buf[128];
9660
9661 /* Verify the required arguments in the original call. */
9662 if (nargs != 6
9663 || !validate_arg (args[0], INTEGER_TYPE)
9664 || !validate_arg (args[1], INTEGER_TYPE)
9665 || !validate_arg (args[2], INTEGER_TYPE)
9666 || !validate_arg (args[3], INTEGER_TYPE)
9667 || !validate_arg (args[4], INTEGER_TYPE)
9668 || !validate_arg (args[5], REAL_TYPE))
9669 return NULL_TREE;
9670
9671 fp_nan = args[0];
9672 fp_infinite = args[1];
9673 fp_normal = args[2];
9674 fp_subnormal = args[3];
9675 fp_zero = args[4];
9676 arg = args[5];
9677 type = TREE_TYPE (arg);
9678 mode = TYPE_MODE (type);
9679 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9680
9681 /* fpclassify(x) ->
9682 isnan(x) ? FP_NAN :
9683 (fabs(x) == Inf ? FP_INFINITE :
9684 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9685 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9686
9687 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9688 build_real (type, dconst0));
9689 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9690 tmp, fp_zero, fp_subnormal);
9691
9692 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9693 real_from_string (&r, buf);
9694 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9695 arg, build_real (type, r));
9696 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9697
9698 if (HONOR_INFINITIES (mode))
9699 {
9700 real_inf (&r);
9701 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9702 build_real (type, r));
9703 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9704 fp_infinite, res);
9705 }
9706
9707 if (HONOR_NANS (mode))
9708 {
9709 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9710 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9711 }
9712
9713 return res;
9714}
9715
08039bd8 9716/* Fold a call to an unordered comparison function such as
a35da91f 9717 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
5039610b 9718 being called and ARG0 and ARG1 are the arguments for the call.
64a9295a
PB
9719 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9720 the opposite of the desired result. UNORDERED_CODE is used
9721 for modes that can hold NaNs and ORDERED_CODE is used for
9722 the rest. */
08039bd8
RS
9723
9724static tree
db3927fb 9725fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
08039bd8
RS
9726 enum tree_code unordered_code,
9727 enum tree_code ordered_code)
9728{
14f661f1 9729 tree type = TREE_TYPE (TREE_TYPE (fndecl));
08039bd8 9730 enum tree_code code;
1aeaea8d
GK
9731 tree type0, type1;
9732 enum tree_code code0, code1;
9733 tree cmp_type = NULL_TREE;
08039bd8 9734
1aeaea8d
GK
9735 type0 = TREE_TYPE (arg0);
9736 type1 = TREE_TYPE (arg1);
c22cacf3 9737
1aeaea8d
GK
9738 code0 = TREE_CODE (type0);
9739 code1 = TREE_CODE (type1);
c22cacf3 9740
1aeaea8d
GK
9741 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9742 /* Choose the wider of two real types. */
9743 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9744 ? type0 : type1;
9745 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9746 cmp_type = type0;
9747 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9748 cmp_type = type1;
c22cacf3 9749
db3927fb
AH
9750 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9751 arg1 = fold_convert_loc (loc, cmp_type, arg1);
14f661f1
RS
9752
9753 if (unordered_code == UNORDERED_EXPR)
9754 {
1b457aa4 9755 if (!HONOR_NANS (arg0))
db3927fb
AH
9756 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9757 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
14f661f1 9758 }
08039bd8 9759
1b457aa4 9760 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
db3927fb
AH
9761 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9762 fold_build2_loc (loc, code, type, arg0, arg1));
08039bd8
RS
9763}
9764
1304953e
JJ
9765/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9766 arithmetics if it can never overflow, or into internal functions that
9767 return both result of arithmetics and overflowed boolean flag in
44a845ca
MS
9768 a complex integer result, or some other check for overflow.
9769 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9770 checking part of that. */
1304953e
JJ
9771
9772static tree
9773fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9774 tree arg0, tree arg1, tree arg2)
9775{
9776 enum internal_fn ifn = IFN_LAST;
43574e4f 9777 /* The code of the expression corresponding to the built-in. */
44a845ca
MS
9778 enum tree_code opcode = ERROR_MARK;
9779 bool ovf_only = false;
9780
1304953e
JJ
9781 switch (fcode)
9782 {
44a845ca
MS
9783 case BUILT_IN_ADD_OVERFLOW_P:
9784 ovf_only = true;
9785 /* FALLTHRU */
1304953e
JJ
9786 case BUILT_IN_ADD_OVERFLOW:
9787 case BUILT_IN_SADD_OVERFLOW:
9788 case BUILT_IN_SADDL_OVERFLOW:
9789 case BUILT_IN_SADDLL_OVERFLOW:
9790 case BUILT_IN_UADD_OVERFLOW:
9791 case BUILT_IN_UADDL_OVERFLOW:
9792 case BUILT_IN_UADDLL_OVERFLOW:
43574e4f 9793 opcode = PLUS_EXPR;
1304953e
JJ
9794 ifn = IFN_ADD_OVERFLOW;
9795 break;
44a845ca
MS
9796 case BUILT_IN_SUB_OVERFLOW_P:
9797 ovf_only = true;
9798 /* FALLTHRU */
1304953e
JJ
9799 case BUILT_IN_SUB_OVERFLOW:
9800 case BUILT_IN_SSUB_OVERFLOW:
9801 case BUILT_IN_SSUBL_OVERFLOW:
9802 case BUILT_IN_SSUBLL_OVERFLOW:
9803 case BUILT_IN_USUB_OVERFLOW:
9804 case BUILT_IN_USUBL_OVERFLOW:
9805 case BUILT_IN_USUBLL_OVERFLOW:
43574e4f 9806 opcode = MINUS_EXPR;
1304953e
JJ
9807 ifn = IFN_SUB_OVERFLOW;
9808 break;
44a845ca
MS
9809 case BUILT_IN_MUL_OVERFLOW_P:
9810 ovf_only = true;
9811 /* FALLTHRU */
1304953e
JJ
9812 case BUILT_IN_MUL_OVERFLOW:
9813 case BUILT_IN_SMUL_OVERFLOW:
9814 case BUILT_IN_SMULL_OVERFLOW:
9815 case BUILT_IN_SMULLL_OVERFLOW:
9816 case BUILT_IN_UMUL_OVERFLOW:
9817 case BUILT_IN_UMULL_OVERFLOW:
9818 case BUILT_IN_UMULLL_OVERFLOW:
43574e4f 9819 opcode = MULT_EXPR;
1304953e
JJ
9820 ifn = IFN_MUL_OVERFLOW;
9821 break;
9822 default:
9823 gcc_unreachable ();
9824 }
44a845ca
MS
9825
9826 /* For the "generic" overloads, the first two arguments can have different
9827 types and the last argument determines the target type to use to check
9828 for overflow. The arguments of the other overloads all have the same
9829 type. */
9830 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9831
9832 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9833 arguments are constant, attempt to fold the built-in call into a constant
9834 expression indicating whether or not it detected an overflow. */
9835 if (ovf_only
9836 && TREE_CODE (arg0) == INTEGER_CST
9837 && TREE_CODE (arg1) == INTEGER_CST)
9838 /* Perform the computation in the target type and check for overflow. */
9839 return omit_one_operand_loc (loc, boolean_type_node,
9840 arith_overflowed_p (opcode, type, arg0, arg1)
9841 ? boolean_true_node : boolean_false_node,
9842 arg2);
9843
43574e4f
JJ
9844 tree intres, ovfres;
9845 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9846 {
9847 intres = fold_binary_loc (loc, opcode, type,
9848 fold_convert_loc (loc, type, arg0),
9849 fold_convert_loc (loc, type, arg1));
9850 if (TREE_OVERFLOW (intres))
9851 intres = drop_tree_overflow (intres);
9852 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9853 ? boolean_true_node : boolean_false_node);
9854 }
9855 else
9856 {
9857 tree ctype = build_complex_type (type);
9858 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9859 arg0, arg1);
9860 tree tgt = save_expr (call);
9861 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9862 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9863 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9864 }
44a845ca
MS
9865
9866 if (ovf_only)
9867 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9868
9869 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
1304953e
JJ
9870 tree store
9871 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9872 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9873}
9874
b25aad5f
MS
9875/* Fold a call to __builtin_FILE to a constant string. */
9876
9877static inline tree
9878fold_builtin_FILE (location_t loc)
9879{
9880 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
9881 {
9882 /* The documentation says this builtin is equivalent to the preprocessor
9883 __FILE__ macro so it appears appropriate to use the same file prefix
9884 mappings. */
9885 fname = remap_macro_filename (fname);
b25aad5f 9886 return build_string_literal (strlen (fname) + 1, fname);
7365279f 9887 }
b25aad5f
MS
9888
9889 return build_string_literal (1, "");
9890}
9891
9892/* Fold a call to __builtin_FUNCTION to a constant string. */
9893
9894static inline tree
9895fold_builtin_FUNCTION ()
9896{
f76b4224
NS
9897 const char *name = "";
9898
b25aad5f 9899 if (current_function_decl)
f76b4224 9900 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 9901
f76b4224 9902 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
9903}
9904
9905/* Fold a call to __builtin_LINE to an integer constant. */
9906
9907static inline tree
9908fold_builtin_LINE (location_t loc, tree type)
9909{
9910 return build_int_cst (type, LOCATION_LINE (loc));
9911}
9912
5039610b 9913/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 9914 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 9915
6de9cd9a 9916static tree
2625bb5d 9917fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 9918{
c0a47a61 9919 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 9920 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 9921 switch (fcode)
b0b3afb2 9922 {
b25aad5f
MS
9923 case BUILT_IN_FILE:
9924 return fold_builtin_FILE (loc);
9925
9926 case BUILT_IN_FUNCTION:
9927 return fold_builtin_FUNCTION ();
9928
9929 case BUILT_IN_LINE:
9930 return fold_builtin_LINE (loc, type);
9931
5039610b 9932 CASE_FLT_FN (BUILT_IN_INF):
6dc198e3 9933 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
5039610b
SL
9934 case BUILT_IN_INFD32:
9935 case BUILT_IN_INFD64:
9936 case BUILT_IN_INFD128:
db3927fb 9937 return fold_builtin_inf (loc, type, true);
d3147f64 9938
5039610b 9939 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
6dc198e3 9940 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
db3927fb 9941 return fold_builtin_inf (loc, type, false);
d3147f64 9942
5039610b
SL
9943 case BUILT_IN_CLASSIFY_TYPE:
9944 return fold_builtin_classify_type (NULL_TREE);
d3147f64 9945
5039610b
SL
9946 default:
9947 break;
9948 }
9949 return NULL_TREE;
9950}
d3147f64 9951
5039610b 9952/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
2625bb5d 9953 This function returns NULL_TREE if no simplification was possible. */
d3147f64 9954
5039610b 9955static tree
2625bb5d 9956fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
5039610b
SL
9957{
9958 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9960
9961 if (TREE_CODE (arg0) == ERROR_MARK)
9962 return NULL_TREE;
9963
d7ebef06 9964 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
5c1a2e63
RS
9965 return ret;
9966
5039610b
SL
9967 switch (fcode)
9968 {
b0b3afb2 9969 case BUILT_IN_CONSTANT_P:
d3147f64 9970 {
5039610b 9971 tree val = fold_builtin_constant_p (arg0);
d3147f64 9972
d3147f64
EC
9973 /* Gimplification will pull the CALL_EXPR for the builtin out of
9974 an if condition. When not optimizing, we'll not CSE it back.
9975 To avoid link error types of regressions, return false now. */
9976 if (!val && !optimize)
9977 val = integer_zero_node;
9978
9979 return val;
9980 }
b0b3afb2 9981
ad82abb8 9982 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9983 return fold_builtin_classify_type (arg0);
ad82abb8 9984
b0b3afb2 9985 case BUILT_IN_STRLEN:
ab996409 9986 return fold_builtin_strlen (loc, type, arg0);
b0b3afb2 9987
ea6a6627 9988 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 9989 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
9990 case BUILT_IN_FABSD32:
9991 case BUILT_IN_FABSD64:
9992 case BUILT_IN_FABSD128:
db3927fb 9993 return fold_builtin_fabs (loc, arg0, type);
9655d83b
RS
9994
9995 case BUILT_IN_ABS:
9996 case BUILT_IN_LABS:
9997 case BUILT_IN_LLABS:
9998 case BUILT_IN_IMAXABS:
db3927fb 9999 return fold_builtin_abs (loc, arg0, type);
07bae5ad 10000
ea6a6627 10001 CASE_FLT_FN (BUILT_IN_CONJ):
c128599a 10002 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 10003 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 10004 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
5039610b 10005 break;
aa6c7c3a 10006
ea6a6627 10007 CASE_FLT_FN (BUILT_IN_CREAL):
c128599a 10008 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 10009 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
6f3d1a5e 10010 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
5039610b 10011 break;
aa6c7c3a 10012
ea6a6627 10013 CASE_FLT_FN (BUILT_IN_CIMAG):
376da68e
KG
10014 if (validate_arg (arg0, COMPLEX_TYPE)
10015 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 10016 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
5039610b 10017 break;
aa6c7c3a 10018
5c1a2e63
RS
10019 CASE_FLT_FN (BUILT_IN_CARG):
10020 return fold_builtin_carg (loc, arg0, type);
43272bf5 10021
5c1a2e63
RS
10022 case BUILT_IN_ISASCII:
10023 return fold_builtin_isascii (loc, arg0);
b8698a0f 10024
5c1a2e63
RS
10025 case BUILT_IN_TOASCII:
10026 return fold_builtin_toascii (loc, arg0);
b8698a0f 10027
5c1a2e63
RS
10028 case BUILT_IN_ISDIGIT:
10029 return fold_builtin_isdigit (loc, arg0);
b8698a0f 10030
903c723b
TC
10031 CASE_FLT_FN (BUILT_IN_FINITE):
10032 case BUILT_IN_FINITED32:
10033 case BUILT_IN_FINITED64:
10034 case BUILT_IN_FINITED128:
10035 case BUILT_IN_ISFINITE:
10036 {
10037 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10038 if (ret)
10039 return ret;
10040 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10041 }
10042
10043 CASE_FLT_FN (BUILT_IN_ISINF):
10044 case BUILT_IN_ISINFD32:
10045 case BUILT_IN_ISINFD64:
10046 case BUILT_IN_ISINFD128:
10047 {
10048 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10049 if (ret)
10050 return ret;
10051 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10052 }
10053
10054 case BUILT_IN_ISNORMAL:
10055 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10056
5c1a2e63 10057 case BUILT_IN_ISINF_SIGN:
903c723b
TC
10058 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10059
10060 CASE_FLT_FN (BUILT_IN_ISNAN):
10061 case BUILT_IN_ISNAND32:
10062 case BUILT_IN_ISNAND64:
10063 case BUILT_IN_ISNAND128:
10064 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
b8698a0f 10065
5c1a2e63
RS
10066 case BUILT_IN_FREE:
10067 if (integer_zerop (arg0))
10068 return build_empty_stmt (loc);
abcc43f5 10069 break;
07bae5ad 10070
5c1a2e63 10071 default:
4835c978 10072 break;
5c1a2e63 10073 }
4977bab6 10074
5c1a2e63 10075 return NULL_TREE;
e19f6bde 10076
5c1a2e63 10077}
b53fed56 10078
b5338fb3
MS
10079/* Folds a call EXPR (which may be null) to built-in function FNDECL
10080 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10081 if no simplification was possible. */
5039610b
SL
10082
10083static tree
b5338fb3 10084fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
5039610b
SL
10085{
10086 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10087 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10088
5c1a2e63
RS
10089 if (TREE_CODE (arg0) == ERROR_MARK
10090 || TREE_CODE (arg1) == ERROR_MARK)
10091 return NULL_TREE;
ea91f957 10092
d7ebef06 10093 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
5c1a2e63 10094 return ret;
752b7d38 10095
5c1a2e63
RS
10096 switch (fcode)
10097 {
752b7d38
KG
10098 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10099 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10100 if (validate_arg (arg0, REAL_TYPE)
c3284718 10101 && validate_arg (arg1, POINTER_TYPE))
752b7d38
KG
10102 return do_mpfr_lgamma_r (arg0, arg1, type);
10103 break;
5039610b 10104
7a2a25ab 10105 CASE_FLT_FN (BUILT_IN_FREXP):
db3927fb 10106 return fold_builtin_frexp (loc, arg0, arg1, type);
7a2a25ab 10107
3d577eaf 10108 CASE_FLT_FN (BUILT_IN_MODF):
db3927fb 10109 return fold_builtin_modf (loc, arg0, arg1, type);
3d577eaf 10110
5039610b 10111 case BUILT_IN_STRSPN:
b5338fb3 10112 return fold_builtin_strspn (loc, expr, arg0, arg1);
5039610b
SL
10113
10114 case BUILT_IN_STRCSPN:
b5338fb3 10115 return fold_builtin_strcspn (loc, expr, arg0, arg1);
5039610b 10116
5039610b 10117 case BUILT_IN_STRPBRK:
b5338fb3 10118 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
5039610b
SL
10119
10120 case BUILT_IN_EXPECT:
1e9168b2 10121 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
5039610b 10122
08039bd8 10123 case BUILT_IN_ISGREATER:
db3927fb
AH
10124 return fold_builtin_unordered_cmp (loc, fndecl,
10125 arg0, arg1, UNLE_EXPR, LE_EXPR);
08039bd8 10126 case BUILT_IN_ISGREATEREQUAL:
db3927fb
AH
10127 return fold_builtin_unordered_cmp (loc, fndecl,
10128 arg0, arg1, UNLT_EXPR, LT_EXPR);
08039bd8 10129 case BUILT_IN_ISLESS:
db3927fb
AH
10130 return fold_builtin_unordered_cmp (loc, fndecl,
10131 arg0, arg1, UNGE_EXPR, GE_EXPR);
08039bd8 10132 case BUILT_IN_ISLESSEQUAL:
db3927fb
AH
10133 return fold_builtin_unordered_cmp (loc, fndecl,
10134 arg0, arg1, UNGT_EXPR, GT_EXPR);
08039bd8 10135 case BUILT_IN_ISLESSGREATER:
db3927fb
AH
10136 return fold_builtin_unordered_cmp (loc, fndecl,
10137 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
08039bd8 10138 case BUILT_IN_ISUNORDERED:
db3927fb
AH
10139 return fold_builtin_unordered_cmp (loc, fndecl,
10140 arg0, arg1, UNORDERED_EXPR,
a35da91f 10141 NOP_EXPR);
08039bd8 10142
d3147f64
EC
10143 /* We do the folding for va_start in the expander. */
10144 case BUILT_IN_VA_START:
10145 break;
a32e70c3 10146
10a0d495 10147 case BUILT_IN_OBJECT_SIZE:
5039610b 10148 return fold_builtin_object_size (arg0, arg1);
10a0d495 10149
86951993
AM
10150 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10151 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10152
10153 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10154 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10155
5039610b
SL
10156 default:
10157 break;
10158 }
10159 return NULL_TREE;
10160}
10161
10162/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
2625bb5d 10163 and ARG2.
5039610b
SL
10164 This function returns NULL_TREE if no simplification was possible. */
10165
10166static tree
db3927fb 10167fold_builtin_3 (location_t loc, tree fndecl,
2625bb5d 10168 tree arg0, tree arg1, tree arg2)
5039610b
SL
10169{
10170 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10171 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
10172
10173 if (TREE_CODE (arg0) == ERROR_MARK
10174 || TREE_CODE (arg1) == ERROR_MARK
10175 || TREE_CODE (arg2) == ERROR_MARK)
10176 return NULL_TREE;
10177
d7ebef06
RS
10178 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10179 arg0, arg1, arg2))
5c1a2e63
RS
10180 return ret;
10181
5039610b
SL
10182 switch (fcode)
10183 {
10184
10185 CASE_FLT_FN (BUILT_IN_SINCOS):
db3927fb 10186 return fold_builtin_sincos (loc, arg0, arg1, arg2);
5039610b 10187
ea91f957
KG
10188 CASE_FLT_FN (BUILT_IN_REMQUO):
10189 if (validate_arg (arg0, REAL_TYPE)
c3284718
RS
10190 && validate_arg (arg1, REAL_TYPE)
10191 && validate_arg (arg2, POINTER_TYPE))
ea91f957
KG
10192 return do_mpfr_remquo (arg0, arg1, arg2);
10193 break;
ea91f957 10194
5039610b 10195 case BUILT_IN_MEMCMP:
5de73c05 10196 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
5039610b 10197
ed9c79e1 10198 case BUILT_IN_EXPECT:
1e9168b2
ML
10199 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10200
10201 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10202 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
ed9c79e1 10203
1304953e
JJ
10204 case BUILT_IN_ADD_OVERFLOW:
10205 case BUILT_IN_SUB_OVERFLOW:
10206 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
10207 case BUILT_IN_ADD_OVERFLOW_P:
10208 case BUILT_IN_SUB_OVERFLOW_P:
10209 case BUILT_IN_MUL_OVERFLOW_P:
1304953e
JJ
10210 case BUILT_IN_SADD_OVERFLOW:
10211 case BUILT_IN_SADDL_OVERFLOW:
10212 case BUILT_IN_SADDLL_OVERFLOW:
10213 case BUILT_IN_SSUB_OVERFLOW:
10214 case BUILT_IN_SSUBL_OVERFLOW:
10215 case BUILT_IN_SSUBLL_OVERFLOW:
10216 case BUILT_IN_SMUL_OVERFLOW:
10217 case BUILT_IN_SMULL_OVERFLOW:
10218 case BUILT_IN_SMULLL_OVERFLOW:
10219 case BUILT_IN_UADD_OVERFLOW:
10220 case BUILT_IN_UADDL_OVERFLOW:
10221 case BUILT_IN_UADDLL_OVERFLOW:
10222 case BUILT_IN_USUB_OVERFLOW:
10223 case BUILT_IN_USUBL_OVERFLOW:
10224 case BUILT_IN_USUBLL_OVERFLOW:
10225 case BUILT_IN_UMUL_OVERFLOW:
10226 case BUILT_IN_UMULL_OVERFLOW:
10227 case BUILT_IN_UMULLL_OVERFLOW:
10228 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10229
b0b3afb2
BS
10230 default:
10231 break;
10232 }
5039610b
SL
10233 return NULL_TREE;
10234}
b0b3afb2 10235
b5338fb3
MS
10236/* Folds a call EXPR (which may be null) to built-in function FNDECL.
10237 ARGS is an array of NARGS arguments. IGNORE is true if the result
10238 of the function call is ignored. This function returns NULL_TREE
10239 if no simplification was possible. */
b8698a0f 10240
b5338fb3
MS
10241static tree
10242fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10243 int nargs, bool)
5039610b
SL
10244{
10245 tree ret = NULL_TREE;
f4577fcd 10246
5039610b
SL
10247 switch (nargs)
10248 {
10249 case 0:
2625bb5d 10250 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
10251 break;
10252 case 1:
2625bb5d 10253 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
10254 break;
10255 case 2:
b5338fb3 10256 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
5039610b
SL
10257 break;
10258 case 3:
2625bb5d 10259 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 10260 break;
5039610b 10261 default:
903c723b 10262 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
10263 break;
10264 }
10265 if (ret)
10266 {
726a989a 10267 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 10268 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
10269 return ret;
10270 }
10271 return NULL_TREE;
10272}
10273
862d0b35
DN
10274/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10275 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10276 of arguments in ARGS to be omitted. OLDNARGS is the number of
10277 elements in ARGS. */
5039610b
SL
10278
10279static tree
862d0b35
DN
10280rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10281 int skip, tree fndecl, int n, va_list newargs)
5039610b 10282{
862d0b35
DN
10283 int nargs = oldnargs - skip + n;
10284 tree *buffer;
5039610b 10285
862d0b35 10286 if (n > 0)
5039610b 10287 {
862d0b35 10288 int i, j;
5039610b 10289
862d0b35
DN
10290 buffer = XALLOCAVEC (tree, nargs);
10291 for (i = 0; i < n; i++)
10292 buffer[i] = va_arg (newargs, tree);
10293 for (j = skip; j < oldnargs; j++, i++)
10294 buffer[i] = args[j];
10295 }
10296 else
10297 buffer = args + skip;
3bf5906b 10298
862d0b35
DN
10299 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10300}
5039610b 10301
0889e9bc
JJ
10302/* Return true if FNDECL shouldn't be folded right now.
10303 If a built-in function has an inline attribute always_inline
10304 wrapper, defer folding it after always_inline functions have
10305 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10306 might not be performed. */
10307
e7f9dae0 10308bool
0889e9bc
JJ
10309avoid_folding_inline_builtin (tree fndecl)
10310{
10311 return (DECL_DECLARED_INLINE_P (fndecl)
10312 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10313 && cfun
10314 && !cfun->always_inline_functions_inlined
10315 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10316}
10317
6de9cd9a 10318/* A wrapper function for builtin folding that prevents warnings for
caf93cb0 10319 "statement without effect" and the like, caused by removing the
6de9cd9a
DN
10320 call node earlier than the warning is generated. */
10321
10322tree
db3927fb 10323fold_call_expr (location_t loc, tree exp, bool ignore)
6de9cd9a 10324{
5039610b
SL
10325 tree ret = NULL_TREE;
10326 tree fndecl = get_callee_fndecl (exp);
3d78e008 10327 if (fndecl && fndecl_built_in_p (fndecl)
6ef5231b
JJ
10328 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10329 yet. Defer folding until we see all the arguments
10330 (after inlining). */
10331 && !CALL_EXPR_VA_ARG_PACK (exp))
10332 {
10333 int nargs = call_expr_nargs (exp);
10334
10335 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10336 instead last argument is __builtin_va_arg_pack (). Defer folding
10337 even in that case, until arguments are finalized. */
10338 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10339 {
10340 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
3d78e008 10341 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
6ef5231b
JJ
10342 return NULL_TREE;
10343 }
10344
0889e9bc
JJ
10345 if (avoid_folding_inline_builtin (fndecl))
10346 return NULL_TREE;
10347
5039610b 10348 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
f311c3b4
NF
10349 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10350 CALL_EXPR_ARGP (exp), ignore);
5039610b
SL
10351 else
10352 {
a6a0570f 10353 tree *args = CALL_EXPR_ARGP (exp);
b5338fb3 10354 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
5039610b 10355 if (ret)
db3927fb 10356 return ret;
5039610b 10357 }
6de9cd9a 10358 }
5039610b
SL
10359 return NULL_TREE;
10360}
b8698a0f 10361
a6a0570f
RB
10362/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10363 N arguments are passed in the array ARGARRAY. Return a folded
10364 expression or NULL_TREE if no simplification was possible. */
4977bab6
ZW
10365
10366tree
a6a0570f 10367fold_builtin_call_array (location_t loc, tree,
94a0dd7b
SL
10368 tree fn,
10369 int n,
10370 tree *argarray)
6385a28f 10371{
a6a0570f
RB
10372 if (TREE_CODE (fn) != ADDR_EXPR)
10373 return NULL_TREE;
5039610b 10374
a6a0570f
RB
10375 tree fndecl = TREE_OPERAND (fn, 0);
10376 if (TREE_CODE (fndecl) == FUNCTION_DECL
3d78e008 10377 && fndecl_built_in_p (fndecl))
a6a0570f
RB
10378 {
10379 /* If last argument is __builtin_va_arg_pack (), arguments to this
10380 function are not finalized yet. Defer folding until they are. */
10381 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10382 {
10383 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
3d78e008 10384 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
a6a0570f
RB
10385 return NULL_TREE;
10386 }
10387 if (avoid_folding_inline_builtin (fndecl))
10388 return NULL_TREE;
10389 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10390 return targetm.fold_builtin (fndecl, n, argarray, false);
10391 else
b5338fb3 10392 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
a6a0570f 10393 }
5039610b 10394
a6a0570f 10395 return NULL_TREE;
5039610b
SL
10396}
10397
43ea30dc
NF
10398/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10399 along with N new arguments specified as the "..." parameters. SKIP
10400 is the number of arguments in EXP to be omitted. This function is used
10401 to do varargs-to-varargs transformations. */
10402
10403static tree
10404rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10405{
10406 va_list ap;
10407 tree t;
10408
10409 va_start (ap, n);
10410 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10411 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10412 va_end (ap);
5039610b 10413
43ea30dc 10414 return t;
5039610b
SL
10415}
10416
10417/* Validate a single argument ARG against a tree code CODE representing
0dba7960 10418 a type. Return true when argument is valid. */
b8698a0f 10419
5039610b 10420static bool
0dba7960 10421validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
10422{
10423 if (!arg)
10424 return false;
10425 else if (code == POINTER_TYPE)
0dba7960 10426 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
10427 else if (code == INTEGER_TYPE)
10428 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 10429 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 10430}
019fa094 10431
726a989a
RB
10432/* This function validates the types of a function call argument list
10433 against a specified list of tree_codes. If the last specifier is a 0,
10434 that represents an ellipses, otherwise the last specifier must be a
10435 VOID_TYPE.
10436
10437 This is the GIMPLE version of validate_arglist. Eventually we want to
10438 completely convert builtins.c to work from GIMPLEs and the tree based
10439 validate_arglist will then be removed. */
10440
10441bool
538dd0b7 10442validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
10443{
10444 enum tree_code code;
10445 bool res = 0;
10446 va_list ap;
10447 const_tree arg;
10448 size_t i;
10449
10450 va_start (ap, call);
10451 i = 0;
10452
10453 do
10454 {
72b5577d 10455 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
10456 switch (code)
10457 {
10458 case 0:
10459 /* This signifies an ellipses, any further arguments are all ok. */
10460 res = true;
10461 goto end;
10462 case VOID_TYPE:
10463 /* This signifies an endlink, if no arguments remain, return
10464 true, otherwise return false. */
10465 res = (i == gimple_call_num_args (call));
10466 goto end;
10467 default:
10468 /* If no parameters remain or the parameter's code does not
10469 match the specified code, return false. Otherwise continue
10470 checking any remaining arguments. */
10471 arg = gimple_call_arg (call, i++);
10472 if (!validate_arg (arg, code))
10473 goto end;
10474 break;
10475 }
10476 }
10477 while (1);
10478
10479 /* We need gotos here since we can only have one VA_CLOSE in a
10480 function. */
10481 end: ;
10482 va_end (ap);
10483
10484 return res;
10485}
10486
/* Default target-specific builtin expander that does nothing.  Returning
   NULL_RTX tells the caller that no target-specific expansion was done,
   so the call should be emitted normally.  All parameters are ignored.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
34ee7f82 10498
7dc61d6c
KG
10499/* Returns true is EXP represents data that would potentially reside
10500 in a readonly section. */
10501
fef5a0d9 10502bool
7dc61d6c
KG
10503readonly_data_expr (tree exp)
10504{
10505 STRIP_NOPS (exp);
10506
aef0afc4
UW
10507 if (TREE_CODE (exp) != ADDR_EXPR)
10508 return false;
10509
10510 exp = get_base_address (TREE_OPERAND (exp, 0));
10511 if (!exp)
10512 return false;
10513
10514 /* Make sure we call decl_readonly_section only for trees it
10515 can handle (since it returns true for everything it doesn't
10516 understand). */
caf93cb0 10517 if (TREE_CODE (exp) == STRING_CST
aef0afc4 10518 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 10519 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 10520 return decl_readonly_section (exp, 0);
7dc61d6c
KG
10521 else
10522 return false;
10523}
6de9cd9a 10524
5039610b
SL
10525/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10526 to the call, and TYPE is its return type.
6de9cd9a 10527
5039610b 10528 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10529 simplified form of the call as a tree.
10530
10531 The simplified form may be a constant or other expression which
10532 computes the same value, but in a more efficient manner (including
10533 calls to other builtin functions).
10534
10535 The call may contain arguments which need to be evaluated, but
10536 which are not useful to determine the result of the call. In
10537 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10538 COMPOUND_EXPR will be an argument which must be evaluated.
10539 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10540 COMPOUND_EXPR in the chain will contain the tree for the simplified
10541 form of the builtin function call. */
10542
10543static tree
b5338fb3 10544fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
6de9cd9a 10545{
5039610b
SL
10546 if (!validate_arg (s1, POINTER_TYPE)
10547 || !validate_arg (s2, POINTER_TYPE))
10548 return NULL_TREE;
6de9cd9a 10549
b5338fb3
MS
10550 if (!check_nul_terminated_array (expr, s1)
10551 || !check_nul_terminated_array (expr, s2))
10552 return NULL_TREE;
6de9cd9a 10553
b5338fb3
MS
10554 tree fn;
10555 const char *p1, *p2;
6de9cd9a 10556
b5338fb3
MS
10557 p2 = c_getstr (s2);
10558 if (p2 == NULL)
10559 return NULL_TREE;
6de9cd9a 10560
b5338fb3
MS
10561 p1 = c_getstr (s1);
10562 if (p1 != NULL)
10563 {
10564 const char *r = strpbrk (p1, p2);
10565 tree tem;
6de9cd9a 10566
b5338fb3
MS
10567 if (r == NULL)
10568 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a 10569
b5338fb3
MS
10570 /* Return an offset into the constant string argument. */
10571 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10572 return fold_convert_loc (loc, type, tem);
10573 }
6de9cd9a 10574
b5338fb3
MS
10575 if (p2[0] == '\0')
10576 /* strpbrk(x, "") == NULL.
10577 Evaluate and ignore s1 in case it had side-effects. */
10578 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
6de9cd9a 10579
b5338fb3
MS
10580 if (p2[1] != '\0')
10581 return NULL_TREE; /* Really call strpbrk. */
10582
10583 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10584 if (!fn)
10585 return NULL_TREE;
10586
10587 /* New argument list transforming strpbrk(s1, s2) to
10588 strchr(s1, s2[0]). */
10589 return build_call_expr_loc (loc, fn, 2, s1,
10590 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
10591}
10592
5039610b
SL
10593/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10594 to the call.
6de9cd9a 10595
5039610b 10596 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10597 simplified form of the call as a tree.
10598
10599 The simplified form may be a constant or other expression which
10600 computes the same value, but in a more efficient manner (including
10601 calls to other builtin functions).
10602
10603 The call may contain arguments which need to be evaluated, but
10604 which are not useful to determine the result of the call. In
10605 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10606 COMPOUND_EXPR will be an argument which must be evaluated.
10607 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10608 COMPOUND_EXPR in the chain will contain the tree for the simplified
10609 form of the builtin function call. */
10610
10611static tree
b5338fb3 10612fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 10613{
5039610b
SL
10614 if (!validate_arg (s1, POINTER_TYPE)
10615 || !validate_arg (s2, POINTER_TYPE))
10616 return NULL_TREE;
6de9cd9a 10617
b5338fb3
MS
10618 if (!check_nul_terminated_array (expr, s1)
10619 || !check_nul_terminated_array (expr, s2))
10620 return NULL_TREE;
10621
10622 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10623
10624 /* If either argument is "", return NULL_TREE. */
10625 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10626 /* Evaluate and ignore both arguments in case either one has
10627 side-effects. */
10628 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 10629 s1, s2);
b5338fb3 10630 return NULL_TREE;
6de9cd9a
DN
10631}
10632
5039610b
SL
10633/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10634 to the call.
6de9cd9a 10635
5039610b 10636 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10637 simplified form of the call as a tree.
10638
10639 The simplified form may be a constant or other expression which
10640 computes the same value, but in a more efficient manner (including
10641 calls to other builtin functions).
10642
10643 The call may contain arguments which need to be evaluated, but
10644 which are not useful to determine the result of the call. In
10645 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10646 COMPOUND_EXPR will be an argument which must be evaluated.
10647 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10648 COMPOUND_EXPR in the chain will contain the tree for the simplified
10649 form of the builtin function call. */
10650
10651static tree
b5338fb3 10652fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 10653{
5039610b
SL
10654 if (!validate_arg (s1, POINTER_TYPE)
10655 || !validate_arg (s2, POINTER_TYPE))
10656 return NULL_TREE;
b5338fb3
MS
10657
10658 if (!check_nul_terminated_array (expr, s1)
10659 || !check_nul_terminated_array (expr, s2))
10660 return NULL_TREE;
10661
10662 /* If the first argument is "", return NULL_TREE. */
10663 const char *p1 = c_getstr (s1);
10664 if (p1 && *p1 == '\0')
6de9cd9a 10665 {
b5338fb3
MS
10666 /* Evaluate and ignore argument s2 in case it has
10667 side-effects. */
10668 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 10669 size_zero_node, s2);
b5338fb3 10670 }
6de9cd9a 10671
b5338fb3
MS
10672 /* If the second argument is "", return __builtin_strlen(s1). */
10673 const char *p2 = c_getstr (s2);
10674 if (p2 && *p2 == '\0')
10675 {
10676 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a 10677
b5338fb3
MS
10678 /* If the replacement _DECL isn't initialized, don't do the
10679 transformation. */
10680 if (!fn)
10681 return NULL_TREE;
6de9cd9a 10682
b5338fb3 10683 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 10684 }
b5338fb3 10685 return NULL_TREE;
6de9cd9a
DN
10686}
10687
5039610b 10688/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
10689 produced. False otherwise. This is done so that we don't output the error
10690 or warning twice or three times. */
726a989a 10691
2efcfa4e 10692bool
5039610b 10693fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
10694{
10695 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
10696 int nargs = call_expr_nargs (exp);
10697 tree arg;
34c88790
DS
10698 /* There is good chance the current input_location points inside the
10699 definition of the va_start macro (perhaps on the token for
10700 builtin) in a system header, so warnings will not be emitted.
10701 Use the location in real source code. */
620e594b 10702 location_t current_location =
34c88790
DS
10703 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10704 NULL);
6de9cd9a 10705
f38958e8 10706 if (!stdarg_p (fntype))
2efcfa4e 10707 {
a9c697b8 10708 error ("%<va_start%> used in function with fixed arguments");
2efcfa4e
AP
10709 return true;
10710 }
5039610b
SL
10711
10712 if (va_start_p)
8870e212 10713 {
5039610b
SL
10714 if (va_start_p && (nargs != 2))
10715 {
10716 error ("wrong number of arguments to function %<va_start%>");
10717 return true;
10718 }
10719 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
10720 }
10721 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10722 when we checked the arguments and if needed issued a warning. */
5039610b 10723 else
6de9cd9a 10724 {
5039610b
SL
10725 if (nargs == 0)
10726 {
10727 /* Evidently an out of date version of <stdarg.h>; can't validate
10728 va_start's second argument, but can still work as intended. */
34c88790 10729 warning_at (current_location,
b9c8da34
DS
10730 OPT_Wvarargs,
10731 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
10732 return true;
10733 }
10734 else if (nargs > 1)
c22cacf3 10735 {
5039610b 10736 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
10737 return true;
10738 }
5039610b
SL
10739 arg = CALL_EXPR_ARG (exp, 0);
10740 }
10741
4e3825db
MM
10742 if (TREE_CODE (arg) == SSA_NAME)
10743 arg = SSA_NAME_VAR (arg);
10744
5039610b 10745 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 10746 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
10747 the arguments and if needed issuing a warning. */
10748 if (!integer_zerop (arg))
10749 {
10750 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 10751
6de9cd9a
DN
10752 /* Strip off all nops for the sake of the comparison. This
10753 is not quite the same as STRIP_NOPS. It does more.
10754 We must also strip off INDIRECT_EXPR for C++ reference
10755 parameters. */
1043771b 10756 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
10757 || TREE_CODE (arg) == INDIRECT_REF)
10758 arg = TREE_OPERAND (arg, 0);
10759 if (arg != last_parm)
c22cacf3 10760 {
118f3b19
KH
10761 /* FIXME: Sometimes with the tree optimizers we can get the
10762 not the last argument even though the user used the last
10763 argument. We just warn and set the arg to be the last
10764 argument so that we will get wrong-code because of
10765 it. */
34c88790 10766 warning_at (current_location,
b9c8da34 10767 OPT_Wvarargs,
34c88790 10768 "second parameter of %<va_start%> not last named argument");
2efcfa4e 10769 }
2985f531
MLI
10770
10771 /* Undefined by C99 7.15.1.4p4 (va_start):
10772 "If the parameter parmN is declared with the register storage
10773 class, with a function or array type, or with a type that is
10774 not compatible with the type that results after application of
10775 the default argument promotions, the behavior is undefined."
10776 */
10777 else if (DECL_REGISTER (arg))
34c88790
DS
10778 {
10779 warning_at (current_location,
b9c8da34 10780 OPT_Wvarargs,
9c582551 10781 "undefined behavior when second parameter of "
34c88790
DS
10782 "%<va_start%> is declared with %<register%> storage");
10783 }
2985f531 10784
8870e212 10785 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
10786 optimizers are run and then avoid keeping it in the tree,
10787 as otherwise we could warn even for correct code like:
10788 void foo (int i, ...)
10789 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
10790 if (va_start_p)
10791 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10792 else
10793 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
10794 }
10795 return false;
6de9cd9a
DN
10796}
10797
10798
5039610b 10799/* Expand a call EXP to __builtin_object_size. */
10a0d495 10800
9b2b7279 10801static rtx
10a0d495
JJ
10802expand_builtin_object_size (tree exp)
10803{
10804 tree ost;
10805 int object_size_type;
10806 tree fndecl = get_callee_fndecl (exp);
10a0d495 10807
5039610b 10808 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10a0d495 10809 {
0f2c4a8f 10810 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
c94ed7a1 10811 exp, fndecl);
10a0d495
JJ
10812 expand_builtin_trap ();
10813 return const0_rtx;
10814 }
10815
5039610b 10816 ost = CALL_EXPR_ARG (exp, 1);
10a0d495
JJ
10817 STRIP_NOPS (ost);
10818
10819 if (TREE_CODE (ost) != INTEGER_CST
10820 || tree_int_cst_sgn (ost) < 0
10821 || compare_tree_int (ost, 3) > 0)
10822 {
0f2c4a8f 10823 error ("%Klast argument of %qD is not integer constant between 0 and 3",
c94ed7a1 10824 exp, fndecl);
10a0d495
JJ
10825 expand_builtin_trap ();
10826 return const0_rtx;
10827 }
10828
9439e9a1 10829 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
10830
10831 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10832}
10833
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* The second argument is an integer for memset, a pointer otherwise.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses; the result also gates the
     transformation to an unchecked call below.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Rebuild as the unchecked variant, preserving tail-call status,
	 and expand that instead.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10950
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is a call to one of the __*_chk built-ins and FCODE identifies
   which one; the argument positions examined below depend on FCODE.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  /* Pick the relevant arguments out of the call, per built-in.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
11023
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  The output length can only
   be determined when the format string is a literal containing either
   no conversions or a single "%s" with a known string argument.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A size of (size_t) -1 means "unchecked"; skip in that case or when
     the size isn't a compile-time constant.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
11081
f9555f40
JJ
11082/* Emit warning if a free is called with address of a variable. */
11083
11084static void
11085maybe_emit_free_warning (tree exp)
11086{
9616781d
JJ
11087 if (call_expr_nargs (exp) != 1)
11088 return;
11089
f9555f40
JJ
11090 tree arg = CALL_EXPR_ARG (exp, 0);
11091
11092 STRIP_NOPS (arg);
11093 if (TREE_CODE (arg) != ADDR_EXPR)
11094 return;
11095
11096 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 11097 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
11098 return;
11099
11100 if (SSA_VAR_P (arg))
a3a704a4
MH
11101 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11102 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 11103 else
a3a704a4
MH
11104 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11105 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
11106}
11107
5039610b
SL
11108/* Fold a call to __builtin_object_size with arguments PTR and OST,
11109 if possible. */
10a0d495 11110
9b2b7279 11111static tree
5039610b 11112fold_builtin_object_size (tree ptr, tree ost)
10a0d495 11113{
88e06841 11114 unsigned HOST_WIDE_INT bytes;
10a0d495
JJ
11115 int object_size_type;
11116
5039610b
SL
11117 if (!validate_arg (ptr, POINTER_TYPE)
11118 || !validate_arg (ost, INTEGER_TYPE))
11119 return NULL_TREE;
10a0d495 11120
10a0d495
JJ
11121 STRIP_NOPS (ost);
11122
11123 if (TREE_CODE (ost) != INTEGER_CST
11124 || tree_int_cst_sgn (ost) < 0
11125 || compare_tree_int (ost, 3) > 0)
5039610b 11126 return NULL_TREE;
10a0d495 11127
9439e9a1 11128 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
11129
11130 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11131 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11132 and (size_t) 0 for types 2 and 3. */
11133 if (TREE_SIDE_EFFECTS (ptr))
2ac7cbb5 11134 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10a0d495
JJ
11135
11136 if (TREE_CODE (ptr) == ADDR_EXPR)
88e06841 11137 {
05a64756 11138 compute_builtin_object_size (ptr, object_size_type, &bytes);
807e902e 11139 if (wi::fits_to_tree_p (bytes, size_type_node))
88e06841
AS
11140 return build_int_cstu (size_type_node, bytes);
11141 }
10a0d495
JJ
11142 else if (TREE_CODE (ptr) == SSA_NAME)
11143 {
10a0d495
JJ
11144 /* If object size is not known yet, delay folding until
11145 later. Maybe subsequent passes will help determining
11146 it. */
05a64756
MS
11147 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11148 && wi::fits_to_tree_p (bytes, size_type_node))
88e06841 11149 return build_int_cstu (size_type_node, bytes);
10a0d495
JJ
11150 }
11151
88e06841 11152 return NULL_TREE;
10a0d495
JJ
11153}
11154
903c723b
TC
11155/* Builtins with folding operations that operate on "..." arguments
11156 need special handling; we need to store the arguments in a convenient
11157 data structure before attempting any folding. Fortunately there are
11158 only a few builtins that fall into this category. FNDECL is the
11159 function, EXP is the CALL_EXPR for the call. */
11160
11161static tree
11162fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11163{
11164 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11165 tree ret = NULL_TREE;
11166
11167 switch (fcode)
11168 {
11169 case BUILT_IN_FPCLASSIFY:
11170 ret = fold_builtin_fpclassify (loc, args, nargs);
11171 break;
11172
11173 default:
11174 break;
11175 }
11176 if (ret)
11177 {
11178 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11179 SET_EXPR_LOCATION (ret, loc);
11180 TREE_NO_WARNING (ret) = 1;
11181 return ret;
11182 }
11183 return NULL_TREE;
11184}
11185
000ba23d
KG
11186/* Initialize format string characters in the target charset. */
11187
fef5a0d9 11188bool
000ba23d
KG
11189init_target_chars (void)
11190{
11191 static bool init;
11192 if (!init)
11193 {
11194 target_newline = lang_hooks.to_target_charset ('\n');
11195 target_percent = lang_hooks.to_target_charset ('%');
11196 target_c = lang_hooks.to_target_charset ('c');
11197 target_s = lang_hooks.to_target_charset ('s');
11198 if (target_newline == 0 || target_percent == 0 || target_c == 0
11199 || target_s == 0)
11200 return false;
11201
11202 target_percent_c[0] = target_percent;
11203 target_percent_c[1] = target_c;
11204 target_percent_c[2] = '\0';
11205
11206 target_percent_s[0] = target_percent;
11207 target_percent_s[1] = target_s;
11208 target_percent_s[2] = '\0';
11209
11210 target_percent_s_newline[0] = target_percent;
11211 target_percent_s_newline[1] = target_s;
11212 target_percent_s_newline[2] = target_newline;
11213 target_percent_s_newline[3] = '\0';
c22cacf3 11214
000ba23d
KG
11215 init = true;
11216 }
11217 return true;
11218}
1f3f1f68 11219
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail,
   otherwise a REAL_CST of TYPE holding M's value.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Convert at maximum internal precision first ...  */
      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  /* ... then round to TYPE's machine mode and require the
	     round trip to be lossless.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
11256
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Real and imaginary parts are converted independently; TYPE is
	 a COMPLEX_TYPE, so its component type is TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* Round each part to the component machine mode and require
	     the conversion to be lossless (unless forced).  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
c128599a 11303
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when folding is not
   possible (non-constant args, non-binary float format, etc.).  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  /* Flags must be clear so do_mpfr_ckconv can detect
	     overflow/underflow raised by the computation itself.
	     Note m0 is used in place as both input and result.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
	     from the lgamma computation; m is used in place.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 11442
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Returns NULL_TREE when folding
   is not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear flags so do_mpc_ckconv can detect overflow/underflow
	     raised by FUNC itself; m0 doubles as output operand.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 11502
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the
   GIMPLE call statement; IGNORE is true if the call's value is not
   used.  Returns the folded replacement tree or NULL_TREE.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls carrying __builtin_va_arg_pack cannot be folded: their
     argument list is not yet final.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper some folders add
		     (e.g. fold_builtin_varargs).  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a normal builtin; ASMSPEC must be
   non-null.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  /* ffs may be emitted as a libcall when int is narrower than a word;
     redirect that libfunc to the user-supplied name as well.  */
  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or
   similarly simple code (register moves, stack loads, exception-state
   accesses).  Used to decide whether a call is cheap enough to keep.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  /* Not a normal builtin at all.  */
  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  Machine-dependent (BUILT_IN_MD)
   builtins are assumed inexpensive.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Arithmetic, bit-twiddling and classification builtins that
	   expand to a handful of instructions.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	/* Fall back to the stricter check.  */
	return is_simple_builtin (decl);
      }

  return false;
}
488c6247
ML
11696
11697/* Return true if T is a constant and the value cast to a target char
11698 can be represented by a host char.
11699 Store the casted char constant in *P if so. */
11700
11701bool
11702target_char_cst_p (tree t, char *p)
11703{
11704 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11705 return false;
11706
11707 *p = (char)tree_to_uhwi (t);
11708 return true;
11709}
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise returns false which doesn't guarantee it is not (thus the list of
   handled builtins below may be incomplete).  Currently covers the C math
   library functions in their float/double/long double (and, where noted,
   _FloatN/_FloatNx) variants.  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
      }
  return false;
}