/* (Gitweb/blame navigation header accidentally captured with this
   source was removed; the file below is gcc/builtins.c.)  */
/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
a950155e 34#include "params.h"
7c29e30e 35#include "tm_p.h"
36#include "stringpool.h"
c296f633 37#include "tree-vrp.h"
7c29e30e 38#include "tree-ssanames.h"
39#include "expmed.h"
40#include "optabs.h"
7c29e30e 41#include "emit-rtl.h"
42#include "recog.h"
7c29e30e 43#include "diagnostic-core.h"
b20a8bb4 44#include "alias.h"
b20a8bb4 45#include "fold-const.h"
6c21be92 46#include "fold-const-call.h"
e6a18b5a 47#include "gimple-ssa-warn-restrict.h"
9ed99284 48#include "stor-layout.h"
49#include "calls.h"
50#include "varasm.h"
51#include "tree-object-size.h"
dae0b5cb 52#include "realmpfr.h"
94ea8568 53#include "cfgrtl.h"
53800dbe 54#include "except.h"
d53441c8 55#include "dojump.h"
56#include "explow.h"
d53441c8 57#include "stmt.h"
53800dbe 58#include "expr.h"
d8fc4d0b 59#include "libfuncs.h"
53800dbe 60#include "output.h"
61#include "typeclass.h"
63c62881 62#include "langhooks.h"
162719b3 63#include "value-prof.h"
3b9c3a16 64#include "builtins.h"
30a86690 65#include "stringpool.h"
66#include "attribs.h"
f9acf11a 67#include "asan.h"
1f24b8e9 68#include "internal-fn.h"
e3240774 69#include "case-cfn-macros.h"
732905bb 70#include "gimple-fold.h"
5aef8938 71#include "intl.h"
859b51f8 72#include "file-prefix-map.h" /* remap_macro_filename() */
a7babc1e 73#include "gomp-constants.h"
74#include "omp-general.h"
5383fb56 75
/* Per-target builtin state; THIS_TARGET_BUILTINS only exists when the
   compiler supports switching targets at run time.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* X-macro trick: expand every DEF_BUILTIN entry in builtins.def to the
   stringized enumerator name, producing a table of printable names
   parallel to the built_in_function enumeration.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
97
f77c4496 98static rtx c_readstr (const char *, scalar_int_mode);
aecda0d6 99static int target_char_cast (tree, char *);
d8ae1baa 100static rtx get_memory_rtx (tree, tree);
aecda0d6 101static int apply_args_size (void);
102static int apply_result_size (void);
aecda0d6 103static rtx result_vector (int, rtx);
aecda0d6 104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6b43bae4 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 114static rtx expand_builtin_sincos (tree);
f97eea22 115static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 118static rtx expand_builtin_next_arg (void);
aecda0d6 119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
0dbefa15 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
a65c4d64 123static rtx expand_builtin_strcmp (tree, rtx);
3754d046 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
f77c4496 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
8d6c6ef5 126static rtx expand_builtin_memchr (tree, rtx);
a65c4d64 127static rtx expand_builtin_memcpy (tree, rtx);
d0fbba1a 128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
02aa6d73 129 rtx target, tree exp,
130 memop_ret retmode);
4d317237 131static rtx expand_builtin_memmove (tree, rtx);
d0fbba1a 132static rtx expand_builtin_mempcpy (tree, rtx);
02aa6d73 133static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
5aef8938 134static rtx expand_builtin_strcat (tree, rtx);
a65c4d64 135static rtx expand_builtin_strcpy (tree, rtx);
a788aa5f 136static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
3754d046 137static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
4d317237 138static rtx expand_builtin_stpncpy (tree, rtx);
5aef8938 139static rtx expand_builtin_strncat (tree, rtx);
a65c4d64 140static rtx expand_builtin_strncpy (tree, rtx);
f77c4496 141static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
3754d046 142static rtx expand_builtin_memset (tree, rtx, machine_mode);
143static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 144static rtx expand_builtin_bzero (tree);
3754d046 145static rtx expand_builtin_strlen (tree, rtx, machine_mode);
864bd5de 146static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
2b29cc6a 147static rtx expand_builtin_alloca (tree);
3754d046 148static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 149static rtx expand_builtin_frame_address (tree, tree);
389dd41b 150static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 151static rtx expand_builtin_expect (tree, rtx);
01107f42 152static rtx expand_builtin_expect_with_probability (tree, rtx);
aecda0d6 153static tree fold_builtin_constant_p (tree);
154static tree fold_builtin_classify_type (tree);
c7cbde74 155static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 156static tree fold_builtin_inf (location_t, tree, int);
389dd41b 157static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
184fac50 158static bool validate_arg (const_tree, enum tree_code code);
aecda0d6 159static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 160static rtx expand_builtin_signbit (tree, rtx);
389dd41b 161static tree fold_builtin_memcmp (location_t, tree, tree, tree);
389dd41b 162static tree fold_builtin_isascii (location_t, tree);
163static tree fold_builtin_toascii (location_t, tree);
164static tree fold_builtin_isdigit (location_t, tree);
165static tree fold_builtin_fabs (location_t, tree, tree);
166static tree fold_builtin_abs (location_t, tree, tree);
167static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 168 enum tree_code);
e80cc485 169static tree fold_builtin_0 (location_t, tree);
170static tree fold_builtin_1 (location_t, tree, tree);
171static tree fold_builtin_2 (location_t, tree, tree, tree);
172static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
12f08300 173static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 174
175static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
389dd41b 176static tree fold_builtin_strspn (location_t, tree, tree);
177static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 178
0a39fd54 179static rtx expand_builtin_object_size (tree);
3754d046 180static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 181 enum built_in_function);
182static void maybe_emit_chk_warning (tree, enum built_in_function);
183static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 184static void maybe_emit_free_warning (tree);
c2f47e15 185static tree fold_builtin_object_size (tree, tree);
99eabcc1 186
e788f202 187unsigned HOST_WIDE_INT target_newline;
b9ea678c 188unsigned HOST_WIDE_INT target_percent;
99eabcc1 189static unsigned HOST_WIDE_INT target_c;
190static unsigned HOST_WIDE_INT target_s;
aea88c77 191char target_percent_c[3];
b9ea678c 192char target_percent_s[3];
e788f202 193char target_percent_s_newline[4];
e5407ca6 194static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 195static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 196static void expand_builtin_sync_synchronize (void);
0a39fd54 197
/* Return true if NAME starts with one of the reserved prefixes used
   for built-in function names: __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  static const struct
  {
    const char *str;
    size_t len;
  } prefixes[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 },
    { "__atomic_", 9 }
  };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i].str, prefixes[i].len) == 0)
      return true;

  return false;
}
4ee9c684 211
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
225
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  /* Start from the weakest possible claim; strengthened below.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      /* All-ones mask means "no explicit alignment mask seen".  */
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
383
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.

   Thin wrapper around get_object_alignment_2 with ADDR_P = false, i.e.
   EXP is treated as an actual access, not an address-taken reference.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
395
957d0361 396/* Return the alignment in bits of EXP, an object. */
0c883ef3 397
398unsigned int
957d0361 399get_object_alignment (tree exp)
0c883ef3 400{
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
403
ceea063b 404 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 405
98ab9e8f 406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
408
409 if (bitpos != 0)
ac29ece2 410 align = least_bit_hwi (bitpos);
957d0361 411 return align;
698537d1 412}
413
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: defer to the object-alignment machinery, noting that the
       access may never actually happen (ADDR_P = true).  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* p + off: alignment of P, with the bit offset advanced by OFF when
	 OFF is constant, otherwise capped by OFF's known trailing zeros.  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* SSA pointer: use alignment info recorded by earlier passes,
	 if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: its low bits are known exactly.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: only the trivial guarantee holds.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
488
69fbc3aa 489/* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
495
496unsigned int
497get_pointer_alignment (tree exp)
498{
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
ceea063b 501
502 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 503
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
506
507 if (bitpos != 0)
ac29ece2 508 align = least_bit_hwi (bitpos);
69fbc3aa 509
510 return align;
511}
512
c4183f31 513/* Return the number of leading non-zero elements in the sequence
c62d63d4 514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516
c4183f31 517unsigned
c62d63d4 518string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519{
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521
522 unsigned n;
523
524 if (eltsize == 1)
525 {
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
528 {
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
532 }
533 }
534 else
535 {
536 for (n = 0; n < maxelts; n++)
537 {
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
541 }
542 }
543 return n;
544}
545
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because it is called with an argument declared
   at DECL that is a character array with no terminating NUL.  Sets
   TREE_NO_WARNING on ARG so the same argument is diagnosed only once.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  /* Point at the expansion site rather than inside a system header.  */
  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
566
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.

   NOTE(review): when SIZE is non-null, *EXACT is written unconditionally;
   callers passing a non-null SIZE with a null EXACT would crash — confirm
   all callers pass both or neither.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references a unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
613
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide characer strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  /* Recurse through conditional/compound expressions when that is safe,
     i.e. when the skipped operand has no side effects (or the caller only
     wants the value).  */
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
803
e913b5cd 804/* Return a constant integer corresponding to target reading
8c85fcb7 805 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 806
6840589f 807static rtx
f77c4496 808c_readstr (const char *str, scalar_int_mode mode)
6840589f 809{
6840589f 810 HOST_WIDE_INT ch;
811 unsigned int i, j;
e913b5cd 812 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 813
814 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 815 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
816 / HOST_BITS_PER_WIDE_INT;
817
a12aa4cc 818 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 819 for (i = 0; i < len; i++)
820 tmp[i] = 0;
6840589f 821
6840589f 822 ch = 1;
823 for (i = 0; i < GET_MODE_SIZE (mode); i++)
824 {
825 j = i;
826 if (WORDS_BIG_ENDIAN)
827 j = GET_MODE_SIZE (mode) - i - 1;
828 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 829 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 830 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
831 j *= BITS_PER_UNIT;
7d3f6cc7 832
6840589f 833 if (ch)
834 ch = (unsigned char) str[i];
e913b5cd 835 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 836 }
ddb1be65 837
ab2c1de8 838 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 839 return immed_wide_int_const (c, mode);
6840589f 840}
841
ecc318ff 842/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 843 host char type, return zero and put that value into variable pointed to by
ecc318ff 844 P. */
845
846static int
aecda0d6 847target_char_cast (tree cst, char *p)
ecc318ff 848{
849 unsigned HOST_WIDE_INT val, hostval;
850
c19686c5 851 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 852 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
853 return 1;
854
e913b5cd 855 /* Do not care if it fits or not right here. */
f9ae6f95 856 val = TREE_INT_CST_LOW (cst);
e913b5cd 857
ecc318ff 858 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 859 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 860
861 hostval = val;
862 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 863 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 864
865 if (val != hostval)
866 return 1;
867
868 *p = hostval;
869 return 0;
870}
871
4ee9c684 872/* Similar to save_expr, but assumes that arbitrary code is not executed
873 in between the multiple evaluations. In particular, we assume that a
874 non-addressable local variable will not be modified. */
875
876static tree
877builtin_save_expr (tree exp)
878{
f6c35aa4 879 if (TREE_CODE (exp) == SSA_NAME
880 || (TREE_ADDRESSABLE (exp) == 0
881 && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 882 || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 883 return exp;
884
885 return save_expr (exp);
886}
887
53800dbe 888/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
889 times to get the address of either a higher stack frame, or a return
890 address located within it (depending on FNDECL_CODE). */
902de8ed 891
c626df3d 892static rtx
869d0ef0 893expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 894{
895 int i;
869d0ef0 896 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
3f840859 897 if (tem == NULL_RTX)
e3e15c50 898 {
3f840859 899 /* For a zero count with __builtin_return_address, we don't care what
900 frame address we return, because target-specific definitions will
901 override us. Therefore frame pointer elimination is OK, and using
902 the soft frame pointer is OK.
903
904 For a nonzero count, or a zero count with __builtin_frame_address,
905 we require a stable offset from the current frame pointer to the
906 previous one, so we must use the hard frame pointer, and
907 we must disable frame pointer elimination. */
908 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
909 tem = frame_pointer_rtx;
910 else
911 {
912 tem = hard_frame_pointer_rtx;
e3e15c50 913
3f840859 914 /* Tell reload not to eliminate the frame pointer. */
915 crtl->accesses_prior_frames = 1;
916 }
e3e15c50 917 }
869d0ef0 918
53800dbe 919 if (count > 0)
920 SETUP_FRAME_ADDRESSES ();
53800dbe 921
3a69c60c 922 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 923 register. There is no way to access it off of the current frame
924 pointer, but it can be accessed off the previous frame pointer by
925 reading the value from the register window save area. */
a26d6c60 926 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 927 count--;
53800dbe 928
929 /* Scan back COUNT frames to the specified frame. */
930 for (i = 0; i < count; i++)
931 {
932 /* Assume the dynamic chain pointer is in the word that the
933 frame address points to, unless otherwise specified. */
53800dbe 934 tem = DYNAMIC_CHAIN_ADDRESS (tem);
53800dbe 935 tem = memory_address (Pmode, tem);
00060fc2 936 tem = gen_frame_mem (Pmode, tem);
83fc1478 937 tem = copy_to_reg (tem);
53800dbe 938 }
939
3a69c60c 940 /* For __builtin_frame_address, return what we've got. But, on
941 the SPARC for example, we may have to add a bias. */
53800dbe 942 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 943 return FRAME_ADDR_RTX (tem);
53800dbe 944
3a69c60c 945 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 946#ifdef RETURN_ADDR_RTX
947 tem = RETURN_ADDR_RTX (count, tem);
948#else
949 tem = memory_address (Pmode,
29c05e22 950 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 951 tem = gen_frame_mem (Pmode, tem);
53800dbe 952#endif
953 return tem;
954}
955
f7c44134 956/* Alias set used for setjmp buffer. */
32c2fdea 957static alias_set_type setjmp_alias_set = -1;
f7c44134 958
6b7f6858 959/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 960 return to RECEIVER_LABEL. This is also called directly by the SJLJ
961 exception handling code. */
53800dbe 962
6b7f6858 963void
aecda0d6 964expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 965{
3754d046 966 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 967 rtx stack_save;
f7c44134 968 rtx mem;
53800dbe 969
f7c44134 970 if (setjmp_alias_set == -1)
971 setjmp_alias_set = new_alias_set ();
972
85d654dd 973 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 974
37ae8504 975 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 976
6b7f6858 977 /* We store the frame pointer and the address of receiver_label in
978 the buffer and use the rest of it for the stack save area, which
979 is machine-dependent. */
53800dbe 980
f7c44134 981 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 982 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 983 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 984
29c05e22 985 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
986 GET_MODE_SIZE (Pmode))),
ab6ab77e 987 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 988
989 emit_move_insn (validize_mem (mem),
6b7f6858 990 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 991
992 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 993 plus_constant (Pmode, buf_addr,
53800dbe 994 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 995 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 996 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 997
998 /* If there is further processing to do, do it. */
a3c81e61 999 if (targetm.have_builtin_setjmp_setup ())
1000 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 1001
29f09705 1002 /* We have a nonlocal label. */
18d50ae6 1003 cfun->has_nonlocal_label = 1;
6b7f6858 1004}
53800dbe 1005
2c8a1497 1006/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 1007 also called directly by the SJLJ exception handling code.
1008 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
6b7f6858 1009
1010void
a3c81e61 1011expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 1012{
82c7907c 1013 rtx chain;
1014
4598ade9 1015 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 1016 marked as used by this function. */
18b42941 1017 emit_use (hard_frame_pointer_rtx);
53800dbe 1018
1019 /* Mark the static chain as clobbered here so life information
1020 doesn't get messed up for it. */
3c56e0c1 1021 chain = rtx_for_static_chain (current_function_decl, true);
82c7907c 1022 if (chain && REG_P (chain))
1023 emit_clobber (chain);
53800dbe 1024
1025 /* Now put in the code to restore the frame pointer, and argument
491e04ef 1026 pointer, if needed. */
a3c81e61 1027 if (! targetm.have_nonlocal_goto ())
62dcb5c8 1028 {
1029 /* First adjust our frame pointer to its actual value. It was
1030 previously set to the start of the virtual area corresponding to
1031 the stacked variables when we branched here and now needs to be
1032 adjusted to the actual hardware fp value.
1033
1034 Assignments to virtual registers are converted by
1035 instantiate_virtual_regs into the corresponding assignment
1036 to the underlying register (fp in this case) that makes
1037 the original assignment true.
1038 So the following insn will actually be decrementing fp by
8374586c 1039 TARGET_STARTING_FRAME_OFFSET. */
62dcb5c8 1040 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1041
1042 /* Restoring the frame pointer also modifies the hard frame pointer.
1043 Mark it used (so that the previous assignment remains live once
1044 the frame pointer is eliminated) and clobbered (to represent the
1045 implicit update from the assignment). */
1046 emit_use (hard_frame_pointer_rtx);
1047 emit_clobber (hard_frame_pointer_rtx);
1048 }
53800dbe 1049
a494b6d7 1050 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
53800dbe 1051 {
4598ade9 1052 /* If the argument pointer can be eliminated in favor of the
1053 frame pointer, we don't need to restore it. We assume here
1054 that if such an elimination is present, it can always be used.
1055 This is the case on all known machines; if we don't make this
1056 assumption, we do unnecessary saving on many machines. */
53800dbe 1057 size_t i;
e99c3a1d 1058 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 1059
3098b2d3 1060 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 1061 if (elim_regs[i].from == ARG_POINTER_REGNUM
1062 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1063 break;
1064
3098b2d3 1065 if (i == ARRAY_SIZE (elim_regs))
53800dbe 1066 {
1067 /* Now restore our arg pointer from the address at which it
05927e40 1068 was saved in our stack frame. */
27a7a23a 1069 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 1070 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 1071 }
1072 }
53800dbe 1073
a3c81e61 1074 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1075 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1076 else if (targetm.have_nonlocal_goto_receiver ())
1077 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 1078 else
a3c81e61 1079 { /* Nothing */ }
57f6bb94 1080
3072d30e 1081 /* We must not allow the code we just generated to be reordered by
1082 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 1083 happen immediately, not later. */
3072d30e 1084 emit_insn (gen_blockage ());
6b7f6858 1085}
53800dbe 1086
53800dbe 1087/* __builtin_longjmp is passed a pointer to an array of five words (not
1088 all will be used on all machines). It operates similarly to the C
1089 library function of the same name, but is more efficient. Much of
2c8a1497 1090 the code below is copied from the handling of non-local gotos. */
53800dbe 1091
c626df3d 1092static void
aecda0d6 1093expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 1094{
1e0c0b35 1095 rtx fp, lab, stack;
1096 rtx_insn *insn, *last;
3754d046 1097 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 1098
48e1416a 1099 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 1100 function */
1101 if (SUPPORTS_STACK_ALIGNMENT)
1102 crtl->need_drap = true;
1103
f7c44134 1104 if (setjmp_alias_set == -1)
1105 setjmp_alias_set = new_alias_set ();
1106
85d654dd 1107 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 1108
53800dbe 1109 buf_addr = force_reg (Pmode, buf_addr);
1110
82c7907c 1111 /* We require that the user must pass a second argument of 1, because
1112 that is what builtin_setjmp will return. */
64db345d 1113 gcc_assert (value == const1_rtx);
53800dbe 1114
4712c7d6 1115 last = get_last_insn ();
a3c81e61 1116 if (targetm.have_builtin_longjmp ())
1117 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1118 else
53800dbe 1119 {
1120 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1121 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1122 GET_MODE_SIZE (Pmode)));
1123
29c05e22 1124 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1125 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1126 set_mem_alias_set (fp, setjmp_alias_set);
1127 set_mem_alias_set (lab, setjmp_alias_set);
1128 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1129
1130 /* Pick up FP, label, and SP from the block and jump. This code is
1131 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 1132 if (targetm.have_nonlocal_goto ())
53800dbe 1133 /* We have to pass a value to the nonlocal_goto pattern that will
1134 get copied into the static_chain pointer, but it does not matter
1135 what that value is, because builtin_setjmp does not use it. */
a3c81e61 1136 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1137 else
53800dbe 1138 {
1139 lab = copy_to_reg (lab);
1140
18b42941 1141 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1142 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1143
5f337044 1144 /* Restore the frame pointer and stack pointer. We must use a
1145 temporary since the setjmp buffer may be a local. */
1146 fp = copy_to_reg (fp);
e9c97615 1147 emit_stack_restore (SAVE_NONLOCAL, stack);
5f337044 1148 emit_move_insn (hard_frame_pointer_rtx, fp);
53800dbe 1149
18b42941 1150 emit_use (hard_frame_pointer_rtx);
1151 emit_use (stack_pointer_rtx);
53800dbe 1152 emit_indirect_jump (lab);
1153 }
1154 }
615166bb 1155
1156 /* Search backwards and mark the jump insn as a non-local goto.
1157 Note that this precludes the use of __builtin_longjmp to a
1158 __builtin_setjmp target in the same function. However, we've
1159 already cautioned the user that these functions are for
1160 internal exception handling use only. */
449c0509 1161 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1162 {
64db345d 1163 gcc_assert (insn != last);
7d3f6cc7 1164
6d7dc5b9 1165 if (JUMP_P (insn))
449c0509 1166 {
a1ddb869 1167 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1168 break;
1169 }
6d7dc5b9 1170 else if (CALL_P (insn))
9342ee68 1171 break;
449c0509 1172 }
53800dbe 1173}
1174
0e80b01d 1175static inline bool
1176more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1177{
1178 return (iter->i < iter->n);
1179}
1180
1181/* This function validates the types of a function call argument list
1182 against a specified list of tree_codes. If the last specifier is a 0,
5cfa3fc8 1183 that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1184 VOID_TYPE. */
1185
1186static bool
1187validate_arglist (const_tree callexpr, ...)
1188{
1189 enum tree_code code;
1190 bool res = 0;
1191 va_list ap;
1192 const_call_expr_arg_iterator iter;
1193 const_tree arg;
1194
1195 va_start (ap, callexpr);
1196 init_const_call_expr_arg_iterator (callexpr, &iter);
1197
5cfa3fc8 1198 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
184fac50 1199 tree fn = CALL_EXPR_FN (callexpr);
1200 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1201
1202 for (unsigned argno = 1; ; ++argno)
0e80b01d 1203 {
1204 code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1205
0e80b01d 1206 switch (code)
1207 {
1208 case 0:
1209 /* This signifies an ellipses, any further arguments are all ok. */
1210 res = true;
1211 goto end;
1212 case VOID_TYPE:
1213 /* This signifies an endlink, if no arguments remain, return
1214 true, otherwise return false. */
1215 res = !more_const_call_expr_args_p (&iter);
1216 goto end;
5cfa3fc8 1217 case POINTER_TYPE:
1218 /* The actual argument must be nonnull when either the whole
1219 called function has been declared nonnull, or when the formal
1220 argument corresponding to the actual argument has been. */
184fac50 1221 if (argmap
1222 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1223 {
1224 arg = next_const_call_expr_arg (&iter);
1225 if (!validate_arg (arg, code) || integer_zerop (arg))
1226 goto end;
1227 break;
1228 }
5cfa3fc8 1229 /* FALLTHRU */
0e80b01d 1230 default:
1231 /* If no parameters remain or the parameter's code does not
1232 match the specified code, return false. Otherwise continue
1233 checking any remaining arguments. */
1234 arg = next_const_call_expr_arg (&iter);
184fac50 1235 if (!validate_arg (arg, code))
0e80b01d 1236 goto end;
1237 break;
1238 }
1239 }
0e80b01d 1240
1241 /* We need gotos here since we can only have one VA_CLOSE in a
1242 function. */
1243 end: ;
1244 va_end (ap);
1245
5cfa3fc8 1246 BITMAP_FREE (argmap);
1247
0e80b01d 1248 return res;
1249}
1250
4ee9c684 1251/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1252 and the address of the save area. */
1253
1254static rtx
c2f47e15 1255expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1256{
1257 tree t_label, t_save_area;
1e0c0b35 1258 rtx r_label, r_save_area, r_fp, r_sp;
1259 rtx_insn *insn;
4ee9c684 1260
c2f47e15 1261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1262 return NULL_RTX;
1263
c2f47e15 1264 t_label = CALL_EXPR_ARG (exp, 0);
1265 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1266
8ec3c5c2 1267 r_label = expand_normal (t_label);
3dce56cc 1268 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1269 r_save_area = expand_normal (t_save_area);
3dce56cc 1270 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1271 /* Copy the address of the save location to a register just in case it was
1272 based on the frame pointer. */
51adbc8a 1273 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1274 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1275 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1276 plus_constant (Pmode, r_save_area,
1277 GET_MODE_SIZE (Pmode)));
4ee9c684 1278
18d50ae6 1279 crtl->has_nonlocal_goto = 1;
4ee9c684 1280
4ee9c684 1281 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1282 if (targetm.have_nonlocal_goto ())
1283 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1284 else
4ee9c684 1285 {
1286 r_label = copy_to_reg (r_label);
1287
18b42941 1288 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1289 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1290
5f337044 1291 /* Restore the frame pointer and stack pointer. We must use a
1292 temporary since the setjmp buffer may be a local. */
1293 r_fp = copy_to_reg (r_fp);
e9c97615 1294 emit_stack_restore (SAVE_NONLOCAL, r_sp);
5f337044 1295 emit_move_insn (hard_frame_pointer_rtx, r_fp);
491e04ef 1296
4ee9c684 1297 /* USE of hard_frame_pointer_rtx added for consistency;
1298 not clear if really needed. */
18b42941 1299 emit_use (hard_frame_pointer_rtx);
1300 emit_use (stack_pointer_rtx);
ad0d0af8 1301
1302 /* If the architecture is using a GP register, we must
1303 conservatively assume that the target function makes use of it.
1304 The prologue of functions with nonlocal gotos must therefore
1305 initialize the GP register to the appropriate value, and we
1306 must then make sure that this value is live at the point
1307 of the jump. (Note that this doesn't necessarily apply
1308 to targets with a nonlocal_goto pattern; they are free
1309 to implement it in their own way. Note also that this is
1310 a no-op if the GP register is a global invariant.) */
1e826931 1311 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1312 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
18b42941 1313 emit_use (pic_offset_table_rtx);
ad0d0af8 1314
4ee9c684 1315 emit_indirect_jump (r_label);
1316 }
491e04ef 1317
4ee9c684 1318 /* Search backwards to the jump insn and mark it as a
1319 non-local goto. */
1320 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1321 {
6d7dc5b9 1322 if (JUMP_P (insn))
4ee9c684 1323 {
a1ddb869 1324 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1325 break;
1326 }
6d7dc5b9 1327 else if (CALL_P (insn))
4ee9c684 1328 break;
1329 }
1330
1331 return const0_rtx;
1332}
1333
843d08a9 1334/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1335 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1336 It updates the stack pointer in that block to the current value. This is
1337 also called directly by the SJLJ exception handling code. */
843d08a9 1338
97354ae4 1339void
843d08a9 1340expand_builtin_update_setjmp_buf (rtx buf_addr)
1341{
3754d046 1342 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
77e843a0 1343 buf_addr = convert_memory_address (Pmode, buf_addr);
d1ff492e 1344 rtx stack_save
843d08a9 1345 = gen_rtx_MEM (sa_mode,
1346 memory_address
1347 (sa_mode,
29c05e22 1348 plus_constant (Pmode, buf_addr,
1349 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1350
e9c97615 1351 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1352}
1353
5e3608d8 1354/* Expand a call to __builtin_prefetch. For a target that does not support
1355 data prefetch, evaluate the memory address argument in case it has side
1356 effects. */
1357
1358static void
c2f47e15 1359expand_builtin_prefetch (tree exp)
5e3608d8 1360{
1361 tree arg0, arg1, arg2;
c2f47e15 1362 int nargs;
5e3608d8 1363 rtx op0, op1, op2;
1364
c2f47e15 1365 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1366 return;
1367
c2f47e15 1368 arg0 = CALL_EXPR_ARG (exp, 0);
1369
26a5cadb 1370 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1371 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1372 locality). */
c2f47e15 1373 nargs = call_expr_nargs (exp);
1374 if (nargs > 1)
1375 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1376 else
c2f47e15 1377 arg1 = integer_zero_node;
1378 if (nargs > 2)
1379 arg2 = CALL_EXPR_ARG (exp, 2);
1380 else
2512209b 1381 arg2 = integer_three_node;
5e3608d8 1382
1383 /* Argument 0 is an address. */
1384 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1385
1386 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1387 if (TREE_CODE (arg1) != INTEGER_CST)
1388 {
07e3a3d2 1389 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1390 arg1 = integer_zero_node;
5e3608d8 1391 }
8ec3c5c2 1392 op1 = expand_normal (arg1);
5e3608d8 1393 /* Argument 1 must be either zero or one. */
1394 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1395 {
c3ceba8e 1396 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1397 " using zero");
5e3608d8 1398 op1 = const0_rtx;
1399 }
1400
1401 /* Argument 2 (locality) must be a compile-time constant int. */
1402 if (TREE_CODE (arg2) != INTEGER_CST)
1403 {
07e3a3d2 1404 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1405 arg2 = integer_zero_node;
1406 }
8ec3c5c2 1407 op2 = expand_normal (arg2);
5e3608d8 1408 /* Argument 2 must be 0, 1, 2, or 3. */
1409 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1410 {
c3ceba8e 1411 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1412 op2 = const0_rtx;
1413 }
1414
1d375a79 1415 if (targetm.have_prefetch ())
5e3608d8 1416 {
8786db1e 1417 struct expand_operand ops[3];
1418
1419 create_address_operand (&ops[0], op0);
1420 create_integer_operand (&ops[1], INTVAL (op1));
1421 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1422 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1423 return;
5e3608d8 1424 }
0a534ba7 1425
f0ce3b1f 1426 /* Don't do anything with direct references to volatile memory, but
1427 generate code to handle other side effects. */
e16ceb8e 1428 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1429 emit_insn (op0);
5e3608d8 1430}
1431
f7c44134 1432/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1433 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1434 the maximum length of the block of memory that might be accessed or
1435 NULL if unknown. */
f7c44134 1436
53800dbe 1437static rtx
d8ae1baa 1438get_memory_rtx (tree exp, tree len)
53800dbe 1439{
ad0a178f 1440 tree orig_exp = exp;
1441 rtx addr, mem;
ad0a178f 1442
1443 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1444 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1445 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1446 exp = TREE_OPERAND (exp, 0);
1447
1448 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1449 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1450
f7c44134 1451 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1452 First remove any nops. */
72dd6141 1453 while (CONVERT_EXPR_P (exp)
f7c44134 1454 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1455 exp = TREE_OPERAND (exp, 0);
1456
5dd3f78f 1457 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1458 (as builtin stringops may alias with anything). */
1459 exp = fold_build2 (MEM_REF,
1460 build_array_type (char_type_node,
1461 build_range_type (sizetype,
1462 size_one_node, len)),
1463 exp, build_int_cst (ptr_type_node, 0));
1464
1465 /* If the MEM_REF has no acceptable address, try to get the base object
1466 from the original address we got, and build an all-aliasing
1467 unknown-sized access to that one. */
1468 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1469 set_mem_attributes (mem, exp, 0);
1470 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1471 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1472 0))))
eec8e941 1473 {
5dd3f78f 1474 exp = build_fold_addr_expr (exp);
1475 exp = fold_build2 (MEM_REF,
1476 build_array_type (char_type_node,
1477 build_range_type (sizetype,
1478 size_zero_node,
1479 NULL)),
1480 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1481 set_mem_attributes (mem, exp, 0);
eec8e941 1482 }
5dd3f78f 1483 set_mem_alias_set (mem, 0);
53800dbe 1484 return mem;
1485}
1486\f
1487/* Built-in functions to perform an untyped call and return. */
1488
3b9c3a16 1489#define apply_args_mode \
1490 (this_target_builtins->x_apply_args_mode)
1491#define apply_result_mode \
1492 (this_target_builtins->x_apply_result_mode)
53800dbe 1493
53800dbe 1494/* Return the size required for the block returned by __builtin_apply_args,
1495 and initialize apply_args_mode. */
1496
1497static int
aecda0d6 1498apply_args_size (void)
53800dbe 1499{
1500 static int size = -1;
58e9ce8f 1501 int align;
1502 unsigned int regno;
53800dbe 1503
1504 /* The values computed by this function never change. */
1505 if (size < 0)
1506 {
1507 /* The first value is the incoming arg-pointer. */
1508 size = GET_MODE_SIZE (Pmode);
1509
1510 /* The second value is the structure value address unless this is
1511 passed as an "invisible" first argument. */
6812c89e 1512 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1513 size += GET_MODE_SIZE (Pmode);
1514
1515 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1516 if (FUNCTION_ARG_REGNO_P (regno))
1517 {
d8ba6ec1 1518 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1519
64db345d 1520 gcc_assert (mode != VOIDmode);
53800dbe 1521
1522 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1523 if (size % align != 0)
1524 size = CEIL (size, align) * align;
53800dbe 1525 size += GET_MODE_SIZE (mode);
1526 apply_args_mode[regno] = mode;
1527 }
1528 else
1529 {
d8ba6ec1 1530 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
53800dbe 1531 }
1532 }
1533 return size;
1534}
1535
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the result is
   computed once and cached in a local static.  */

static int
apply_result_size (void)
{
  static int size = -1;	/* -1 means "not yet computed".  */
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to the mode's alignment before appending.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1574
53800dbe 1575/* Create a vector describing the result block RESULT. If SAVEP is true,
1576 the result block is used to save the values; otherwise it is used to
1577 restore the values. */
1578
1579static rtx
aecda0d6 1580result_vector (int savep, rtx result)
53800dbe 1581{
1582 int regno, size, align, nelts;
d8ba6ec1 1583 fixed_size_mode mode;
53800dbe 1584 rtx reg, mem;
364c0c59 1585 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1586
53800dbe 1587 size = nelts = 0;
1588 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1589 if ((mode = apply_result_mode[regno]) != VOIDmode)
1590 {
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1595 mem = adjust_address (result, mode, size);
53800dbe 1596 savevec[nelts++] = (savep
d1f9b275 1597 ? gen_rtx_SET (mem, reg)
1598 : gen_rtx_SET (reg, mem));
53800dbe 1599 size += GET_MODE_SIZE (mode);
1600 }
1601 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1602}
53800dbe 1603
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of a stack block laid out as: arg pointer,
   [structure value address,] then each incoming argument register, with
   the same offsets computed by apply_args_size.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1664
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Generate the register-saving code in its own sequence so it can be
       relocated to the function start.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    /* Cache the result for any later __builtin_apply_args in this
       function.  */
    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1708
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the rtx of the function to call, ARGUMENTS is the address
   of a block previously built by __builtin_apply_args, and ARGSIZE is
   the number of bytes of stack arguments to copy.  Returns (in ptr_mode)
   the address of a stack block holding the saved return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  /* On upward-growing stacks the copied block sits below the saved arg
     pointer.  */
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* The layout here must mirror expand_builtin_apply_args_1.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1870
/* Perform an untyped return.  RESULT is the address (in ptr_mode) of a
   block previously filled by __builtin_apply; reload every potential
   return register from it and jump to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* Prefer the target's dedicated untyped_return pattern when it has
     one.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* The layout here must mirror the one used when the block was
	   saved: each slot aligned for its mode.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect a USE of each restored register so the registers stay
	   live up to the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1919
539a3a92 1920/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1921
539a3a92 1922static enum type_class
aecda0d6 1923type_to_class (tree type)
539a3a92 1924{
1925 switch (TREE_CODE (type))
1926 {
1927 case VOID_TYPE: return void_type_class;
1928 case INTEGER_TYPE: return integer_type_class;
539a3a92 1929 case ENUMERAL_TYPE: return enumeral_type_class;
1930 case BOOLEAN_TYPE: return boolean_type_class;
1931 case POINTER_TYPE: return pointer_type_class;
1932 case REFERENCE_TYPE: return reference_type_class;
1933 case OFFSET_TYPE: return offset_type_class;
1934 case REAL_TYPE: return real_type_class;
1935 case COMPLEX_TYPE: return complex_type_class;
1936 case FUNCTION_TYPE: return function_type_class;
1937 case METHOD_TYPE: return method_type_class;
1938 case RECORD_TYPE: return record_type_class;
1939 case UNION_TYPE:
1940 case QUAL_UNION_TYPE: return union_type_class;
1941 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1942 ? string_type_class : array_type_class);
539a3a92 1943 case LANG_TYPE: return lang_type_class;
1944 default: return no_type_class;
1945 }
1946}
bf8e3599 1947
c2f47e15 1948/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1949
53800dbe 1950static rtx
c2f47e15 1951expand_builtin_classify_type (tree exp)
53800dbe 1952{
c2f47e15 1953 if (call_expr_nargs (exp))
1954 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1955 return GEN_INT (no_type_class);
1956}
1957
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  Each expansion supplies the case label(s) for the
   combined_fn switch and fills in the fcode* locals declared in
   mathfn_built_in_2.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* The _Float<N>/_Float<N>X codes stay END_BUILTINS unless FN matches a
     CASE_MATHFN_FLOATN entry below.  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* Each CASE_MATHFN* expansion sets the fcode* variables for FN; see
     the macro definitions above.  */
  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the function code matching TYPE; compare against the main
     variant so qualified types resolve the same way.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
2122
2123/* Return mathematic function equivalent to FN but operating directly on TYPE,
2124 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2125 otherwise use the explicit declaration. If we can't do the conversion,
2126 return null. */
2127
2128static tree
e3240774 2129mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 2130{
2131 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2132 if (fcode2 == END_BUILTINS)
c2f47e15 2133 return NULL_TREE;
b9a16870 2134
2135 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2136 return NULL_TREE;
2137
2138 return builtin_decl_explicit (fcode2);
0a68165a 2139}
2140
/* Like mathfn_built_in_1, but always use the implicit builtin
   declarations (i.e. require the builtin to be implicitly available).  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
2148
/* Like mathfn_built_in_1, but take a built_in_function rather than a
   combined_fn, and always use the implicit builtin declarations.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
2157
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  /* The bulk of the mapping is generated from internal-fn.def via the
     DEF_INTERNAL_* macros below; only special cases are spelled out.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* pow10 is exp10 under another name.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    /* drem is remainder under another name.  */
    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn/scalbln scale by a power of FLT_RADIX, so they only match
	 ldexp when the type's radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2195
2196/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2197 on the current target by a call to an internal function, return the
2198 code of that internal function, otherwise return IFN_LAST. The caller
2199 is responsible for ensuring that any side-effects of the built-in
2200 call are dealt with correctly. E.g. if CALL sets errno, the caller
2201 must decide that the errno result isn't needed or make it available
2202 in some other way. */
2203
2204internal_fn
2205replacement_internal_fn (gcall *call)
2206{
2207 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2208 {
2209 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2210 if (ifn != IFN_LAST)
2211 {
2212 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2213 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2214 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2215 return ifn;
2216 }
2217 }
2218 return IFN_LAST;
2219}
2220
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list so the arguments can be re-expanded
     for the library fallback without duplicating side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2295
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  /* sincos produces both results at once; keep only the one this
	     builtin asked for, discarding the other output.  */
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2396
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2440
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember the insn position so a failed expansion can be rolled
	 back below.  */
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard the emitted insns and restore the
	 original argument so the normal call path sees it unchanged.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2491
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the call always expands to const0_rtx (sincos
   returns void; results are stored through the pointer arguments).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, *sin, *cos): one real argument, two result pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos output pointers so the stores
     below carry the proper alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  /* Availability was checked above, so expansion must succeed.  */
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2545
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.

   Three strategies are tried in order:
     1. a sincos optab insn for the argument's mode;
     2. a libcall to sincos/sincosf/sincosl when the C library has it;
     3. a libcall to cexp/cexpf/cexpl with a purely imaginary argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Allocate stack temporaries for sincos to store into and build
	 trees for their addresses to pass as the pointer arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + xi); build the purely imaginary argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos in the real part (op2) and
     sin in the imaginary part (op1).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2654
a65c4d64 2655/* Conveniently construct a function call expression. FNDECL names the
2656 function to be called, N is the number of arguments, and the "..."
2657 parameters are the argument expressions. Unlike build_call_exr
2658 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2659
2660static tree
2661build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2662{
2663 va_list ap;
2664 tree fntype = TREE_TYPE (fndecl);
2665 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2666
2667 va_start (ap, n);
2668 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2669 va_end (ap);
2670 SET_EXPR_LOCATION (fn, loc);
2671 return fn;
2672}
a65c4d64 2673
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  /* These builtins are always emitted by the front end with a valid
     argument list, so a mismatch is a compiler bug, not a user error.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the math function to fall back to
     if the optab has no handler for this mode.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2811
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  /* These builtins are always emitted by the front end with a valid
     argument list, so a mismatch is a compiler bug, not a user error.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab; the GCC-extension i* variants additionally record
     the C99 l* function to call if the optab cannot be used.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Only try the inline expansion when errno handling is not required;
     there's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the mode of EXP's type.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2913
c2f47e15 2914/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2915 a normal call should be emitted rather than expanding the function
2916 in-line. EXP is the expression that is a call to the builtin
2917 function; if convenient, the result should be placed in TARGET. */
2918
2919static rtx
f97eea22 2920expand_builtin_powi (tree exp, rtx target)
757c219d 2921{
757c219d 2922 tree arg0, arg1;
2923 rtx op0, op1;
3754d046 2924 machine_mode mode;
2925 machine_mode mode2;
757c219d 2926
c2f47e15 2927 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2928 return NULL_RTX;
757c219d 2929
c2f47e15 2930 arg0 = CALL_EXPR_ARG (exp, 0);
2931 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2932 mode = TYPE_MODE (TREE_TYPE (exp));
2933
757c219d 2934 /* Emit a libcall to libgcc. */
2935
c2f47e15 2936 /* Mode of the 2nd argument must match that of an int. */
517be012 2937 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
d0405f40 2938
757c219d 2939 if (target == NULL_RTX)
2940 target = gen_reg_rtx (mode);
2941
f97eea22 2942 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2943 if (GET_MODE (op0) != mode)
2944 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2945 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2946 if (GET_MODE (op1) != mode2)
2947 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2948
f36b9f69 2949 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
9e9e5c15 2950 target, LCT_CONST, mode,
d0405f40 2951 op0, mode, op1, mode2);
757c219d 2952
2953 return target;
2954}
2955
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Try
     TARGET_MODE first, then every wider integer mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-address computation in ahead of the strlen insn
     emitted above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3061
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.
   Also diagnoses bounds that exceed the maximum object size or the
   size of an unterminated source array (-Wstringop-overflow).  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* Case 1: the bound is a compile-time constant.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater. */
	  lendata.decl = unterminated_array (src, &len, &exact);
	  if (!lendata.decl)
	    return NULL_RTX;
	}

      if (lendata.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (lendata.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      /* strnlen (s, n) == min (strlen (s), n) when both are known.  */
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  /* Case 2: the bound is an SSA name with a known value range.  */
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (lendata.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (lendata.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	}
    }

  /* Don't expand when the source array may be unterminated.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length, the
     result is just the length.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3199
6840589f 3200/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3201 bytes from constant string DATA + OFFSET and return it as target
3202 constant. */
3203
3204static rtx
aecda0d6 3205builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 3206 scalar_int_mode mode)
6840589f 3207{
3208 const char *str = (const char *) data;
3209
64db345d 3210 gcc_assert (offset >= 0
3211 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3212 <= strlen (str) + 1));
6840589f 3213
3214 return c_readstr (str + offset, mode);
3215}
3216
36d63243 3217/* LEN specify length of the block of memcpy/memset operation.
9db0f34d 3218 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3219 In some cases we can make very likely guess on max size, then we
3220 set it into PROBABLE_MAX_SIZE. */
36d63243 3221
3222static void
3223determine_block_size (tree len, rtx len_rtx,
3224 unsigned HOST_WIDE_INT *min_size,
9db0f34d 3225 unsigned HOST_WIDE_INT *max_size,
3226 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 3227{
3228 if (CONST_INT_P (len_rtx))
3229 {
4e140a5c 3230 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 3231 return;
3232 }
3233 else
3234 {
9c1be15e 3235 wide_int min, max;
be44111e 3236 enum value_range_kind range_type = VR_UNDEFINED;
9db0f34d 3237
3238 /* Determine bounds from the type. */
3239 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3240 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3241 else
3242 *min_size = 0;
3243 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 3244 *probable_max_size = *max_size
3245 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 3246 else
3247 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3248
3249 if (TREE_CODE (len) == SSA_NAME)
3250 range_type = get_range_info (len, &min, &max);
3251 if (range_type == VR_RANGE)
36d63243 3252 {
fe5ad926 3253 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 3254 *min_size = min.to_uhwi ();
fe5ad926 3255 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 3256 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 3257 }
9db0f34d 3258 else if (range_type == VR_ANTI_RANGE)
36d63243 3259 {
4a474a5a 3260 /* Anti range 0...N lets us to determine minimal size to N+1. */
fe5ad926 3261 if (min == 0)
9db0f34d 3262 {
9c1be15e 3263 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3264 *min_size = max.to_uhwi () + 1;
9db0f34d 3265 }
3266 /* Code like
3267
3268 int n;
3269 if (n < 100)
4a474a5a 3270 memcpy (a, b, n)
9db0f34d 3271
3272 Produce anti range allowing negative values of N. We still
3273 can use the information and make a guess that N is not negative.
3274 */
fe5ad926 3275 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3276 *probable_max_size = min.to_uhwi () - 1;
36d63243 3277 }
3278 }
3279 gcc_checking_assert (*max_size <=
3280 (unsigned HOST_WIDE_INT)
3281 GET_MODE_MASK (GET_MODE (len_rtx)));
3282}
3283
5aef8938 3284/* Try to verify that the sizes and lengths of the arguments to a string
3285 manipulation function given by EXP are within valid bounds and that
e6a18b5a 3286 the operation does not lead to buffer overflow or read past the end.
3287 Arguments other than EXP may be null. When non-null, the arguments
3288 have the following meaning:
3289 DST is the destination of a copy call or NULL otherwise.
3290 SRC is the source of a copy call or NULL otherwise.
3291 DSTWRITE is the number of bytes written into the destination obtained
3292 from the user-supplied size argument to the function (such as in
3293 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3294 MAXREAD is the user-supplied bound on the length of the source sequence
5aef8938 3295 (such as in strncat(d, s, N). It specifies the upper limit on the number
e6a18b5a 3296 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3297 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3298 expression EXP is a string function call (as opposed to a memory call
3299 like memcpy). As an exception, SRCSTR can also be an integer denoting
3300 the precomputed size of the source string or object (for functions like
3301 memcpy).
3302 DSTSIZE is the size of the destination object specified by the last
5aef8938 3303 argument to the _chk builtins, typically resulting from the expansion
e6a18b5a 3304 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3305 DSTSIZE).
5aef8938 3306
e6a18b5a 3307 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
5aef8938 3308 SIZE_MAX.
3309
e6a18b5a 3310 If the call is successfully verified as safe return true, otherwise
3311 return false. */
5aef8938 3312
3313static bool
e6a18b5a 3314check_access (tree exp, tree, tree, tree dstwrite,
3315 tree maxread, tree srcstr, tree dstsize)
5aef8938 3316{
e6a18b5a 3317 int opt = OPT_Wstringop_overflow_;
3318
5aef8938 3319 /* The size of the largest object is half the address space, or
e6a18b5a 3320 PTRDIFF_MAX. (This is way too permissive.) */
3321 tree maxobjsize = max_object_size ();
5aef8938 3322
e6a18b5a 3323 /* Either the length of the source string for string functions or
3324 the size of the source object for raw memory functions. */
5aef8938 3325 tree slen = NULL_TREE;
3326
8d6c6ef5 3327 tree range[2] = { NULL_TREE, NULL_TREE };
3328
5aef8938 3329 /* Set to true when the exact number of bytes written by a string
3330 function like strcpy is not known and the only thing that is
3331 known is that it must be at least one (for the terminating nul). */
3332 bool at_least_one = false;
e6a18b5a 3333 if (srcstr)
5aef8938 3334 {
e6a18b5a 3335 /* SRCSTR is normally a pointer to string but as a special case
5aef8938 3336 it can be an integer denoting the length of a string. */
e6a18b5a 3337 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
5aef8938 3338 {
3339 /* Try to determine the range of lengths the source string
8d6c6ef5 3340 refers to. If it can be determined and is less than
e6a18b5a 3341 the upper bound given by MAXREAD add one to it for
5aef8938 3342 the terminating nul. Otherwise, set it to one for
e6a18b5a 3343 the same reason, or to MAXREAD as appropriate. */
3344 get_range_strlen (srcstr, range);
3345 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
8d6c6ef5 3346 {
e6a18b5a 3347 if (maxread && tree_int_cst_le (maxread, range[0]))
3348 range[0] = range[1] = maxread;
8d6c6ef5 3349 else
3350 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3351 range[0], size_one_node);
3352
e6a18b5a 3353 if (maxread && tree_int_cst_le (maxread, range[1]))
3354 range[1] = maxread;
8d6c6ef5 3355 else if (!integer_all_onesp (range[1]))
3356 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3357 range[1], size_one_node);
3358
3359 slen = range[0];
3360 }
5aef8938 3361 else
3362 {
3363 at_least_one = true;
3364 slen = size_one_node;
3365 }
3366 }
3367 else
e6a18b5a 3368 slen = srcstr;
5aef8938 3369 }
3370
e6a18b5a 3371 if (!dstwrite && !maxread)
5aef8938 3372 {
3373 /* When the only available piece of data is the object size
3374 there is nothing to do. */
3375 if (!slen)
3376 return true;
3377
3378 /* Otherwise, when the length of the source sequence is known
e6a18b5a 3379 (as with strlen), set DSTWRITE to it. */
8d6c6ef5 3380 if (!range[0])
e6a18b5a 3381 dstwrite = slen;
5aef8938 3382 }
3383
e6a18b5a 3384 if (!dstsize)
3385 dstsize = maxobjsize;
5aef8938 3386
e6a18b5a 3387 if (dstwrite)
3388 get_size_range (dstwrite, range);
5aef8938 3389
e6a18b5a 3390 tree func = get_callee_fndecl (exp);
5aef8938 3391
3392 /* First check the number of bytes to be written against the maximum
3393 object size. */
c4183f31 3394 if (range[0]
3395 && TREE_CODE (range[0]) == INTEGER_CST
3396 && tree_int_cst_lt (maxobjsize, range[0]))
5aef8938 3397 {
864bd5de 3398 if (TREE_NO_WARNING (exp))
3399 return false;
3400
5aef8938 3401 location_t loc = tree_nonartificial_location (exp);
4d317237 3402 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3403
864bd5de 3404 bool warned;
5aef8938 3405 if (range[0] == range[1])
864bd5de 3406 warned = warning_at (loc, opt,
3407 "%K%qD specified size %E "
3408 "exceeds maximum object size %E",
3409 exp, func, range[0], maxobjsize);
3410 else
3411 warned = warning_at (loc, opt,
3412 "%K%qD specified size between %E and %E "
3413 "exceeds maximum object size %E",
3414 exp, func,
3415 range[0], range[1], maxobjsize);
3416 if (warned)
3417 TREE_NO_WARNING (exp) = true;
3418
5aef8938 3419 return false;
3420 }
3421
e6a18b5a 3422 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3423 constant, and in range of unsigned HOST_WIDE_INT. */
3424 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3425
5aef8938 3426 /* Next check the number of bytes to be written against the destination
3427 object size. */
e6a18b5a 3428 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
5aef8938 3429 {
3430 if (range[0]
c4183f31 3431 && TREE_CODE (range[0]) == INTEGER_CST
e6a18b5a 3432 && ((tree_fits_uhwi_p (dstsize)
3433 && tree_int_cst_lt (dstsize, range[0]))
c4183f31 3434 || (dstwrite
3435 && tree_fits_uhwi_p (dstwrite)
e6a18b5a 3436 && tree_int_cst_lt (dstwrite, range[0]))))
5aef8938 3437 {
080a1363 3438 if (TREE_NO_WARNING (exp))
3439 return false;
3440
5aef8938 3441 location_t loc = tree_nonartificial_location (exp);
4d317237 3442 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3443
e6a18b5a 3444 if (dstwrite == slen && at_least_one)
8d6c6ef5 3445 {
3446 /* This is a call to strcpy with a destination of 0 size
3447 and a source of unknown length. The call will write
3448 at least one byte past the end of the destination. */
3449 warning_at (loc, opt,
9098b938 3450 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3451 "of size %E overflows the destination",
e6a18b5a 3452 exp, func, range[0], dstsize);
8d6c6ef5 3453 }
3454 else if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3455 warning_n (loc, opt, tree_to_uhwi (range[0]),
3456 "%K%qD writing %E byte into a region "
3457 "of size %E overflows the destination",
3458 "%K%qD writing %E bytes into a region "
3459 "of size %E overflows the destination",
3460 exp, func, range[0], dstsize);
8d6c6ef5 3461 else if (tree_int_cst_sign_bit (range[1]))
3462 {
3463 /* Avoid printing the upper bound if it's invalid. */
3464 warning_at (loc, opt,
9098b938 3465 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3466 "of size %E overflows the destination",
e6a18b5a 3467 exp, func, range[0], dstsize);
8d6c6ef5 3468 }
5aef8938 3469 else
3470 warning_at (loc, opt,
9098b938 3471 "%K%qD writing between %E and %E bytes into "
8d6c6ef5 3472 "a region of size %E overflows the destination",
e6a18b5a 3473 exp, func, range[0], range[1],
3474 dstsize);
5aef8938 3475
3476 /* Return error when an overflow has been detected. */
3477 return false;
3478 }
3479 }
3480
3481 /* Check the maximum length of the source sequence against the size
3482 of the destination object if known, or against the maximum size
3483 of an object. */
e6a18b5a 3484 if (maxread)
5aef8938 3485 {
e6a18b5a 3486 get_size_range (maxread, range);
3487
3488 /* Use the lower end for MAXREAD from now on. */
3489 if (range[0])
3490 maxread = range[0];
5aef8938 3491
e6a18b5a 3492 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
5aef8938 3493 {
3494 location_t loc = tree_nonartificial_location (exp);
4d317237 3495 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3496
3497 if (tree_int_cst_lt (maxobjsize, range[0]))
3498 {
080a1363 3499 if (TREE_NO_WARNING (exp))
3500 return false;
3501
5aef8938 3502 /* Warn about crazy big sizes first since that's more
3503 likely to be meaningful than saying that the bound
3504 is greater than the object size if both are big. */
3505 if (range[0] == range[1])
3506 warning_at (loc, opt,
9098b938 3507 "%K%qD specified bound %E "
8d6c6ef5 3508 "exceeds maximum object size %E",
e6a18b5a 3509 exp, func,
8d6c6ef5 3510 range[0], maxobjsize);
5aef8938 3511 else
3512 warning_at (loc, opt,
9098b938 3513 "%K%qD specified bound between %E and %E "
8d6c6ef5 3514 "exceeds maximum object size %E",
e6a18b5a 3515 exp, func,
8d6c6ef5 3516 range[0], range[1], maxobjsize);
5aef8938 3517
3518 return false;
3519 }
3520
e6a18b5a 3521 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
5aef8938 3522 {
080a1363 3523 if (TREE_NO_WARNING (exp))
3524 return false;
3525
8d6c6ef5 3526 if (tree_int_cst_equal (range[0], range[1]))
5aef8938 3527 warning_at (loc, opt,
9098b938 3528 "%K%qD specified bound %E "
8d6c6ef5 3529 "exceeds destination size %E",
e6a18b5a 3530 exp, func,
3531 range[0], dstsize);
5aef8938 3532 else
3533 warning_at (loc, opt,
9098b938 3534 "%K%qD specified bound between %E and %E "
8d6c6ef5 3535 "exceeds destination size %E",
e6a18b5a 3536 exp, func,
3537 range[0], range[1], dstsize);
5aef8938 3538 return false;
3539 }
3540 }
3541 }
3542
e6a18b5a 3543 /* Check for reading past the end of SRC. */
8d6c6ef5 3544 if (slen
e6a18b5a 3545 && slen == srcstr
3546 && dstwrite && range[0]
8d6c6ef5 3547 && tree_int_cst_lt (slen, range[0]))
3548 {
080a1363 3549 if (TREE_NO_WARNING (exp))
3550 return false;
3551
8d6c6ef5 3552 location_t loc = tree_nonartificial_location (exp);
3553
3554 if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3555 warning_n (loc, opt, tree_to_uhwi (range[0]),
3556 "%K%qD reading %E byte from a region of size %E",
3557 "%K%qD reading %E bytes from a region of size %E",
e6a18b5a 3558 exp, func, range[0], slen);
8d6c6ef5 3559 else if (tree_int_cst_sign_bit (range[1]))
3560 {
3561 /* Avoid printing the upper bound if it's invalid. */
3562 warning_at (loc, opt,
9098b938 3563 "%K%qD reading %E or more bytes from a region "
8d6c6ef5 3564 "of size %E",
e6a18b5a 3565 exp, func, range[0], slen);
8d6c6ef5 3566 }
3567 else
3568 warning_at (loc, opt,
9098b938 3569 "%K%qD reading between %E and %E bytes from a region "
8d6c6ef5 3570 "of size %E",
e6a18b5a 3571 exp, func, range[0], range[1], slen);
8d6c6ef5 3572 return false;
3573 }
3574
5aef8938 3575 return true;
3576}
3577
3578/* Helper to compute the size of the object referenced by the DEST
d8aad786 3579 expression which must have pointer type, using Object Size type
5aef8938 3580 OSTYPE (only the least significant 2 bits are used). Return
24e3b821 3581 an estimate of the size of the object if successful or NULL when
3582 the size cannot be determined. When the referenced object involves
3583 a non-constant offset in some range the returned value represents
3584 the largest size given the smallest non-negative offset in the
3585 range. The function is intended for diagnostics and should not
3586 be used to influence code generation or optimization. */
5aef8938 3587
tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  /* First try the exact computation; it succeeds only when the size
     (and any offsets involved) are compile-time constants.  */
  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      /* Look through the defining assignment at the underlying
	 pointer; CODE below decides how to interpret it.  */
      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      /* Constant offset: recursively size the base pointer
		 and subtract the offset from it.  */
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      /* Variable offset: use its value range, if any, and
		 treat the smallest non-negative offset as leaving
		 the most room.  */
	      wide_int min, max;
	      enum value_range_kind rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3691
3692/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3693 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3694 call expression, DEST is the destination argument, SRC is the source
3695 argument or null, and LEN is the number of bytes. Use Object Size type-0
3696 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3697 (no overflow or invalid sizes), false otherwise. */
3698
3699static bool
e6a18b5a 3700check_memop_access (tree exp, tree dest, tree src, tree size)
5aef8938 3701{
5aef8938 3702 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3703 try to determine the size of the largest source and destination
3704 object using type-0 Object Size regardless of the object size
3705 type specified by the option. */
3706 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3707 tree dstsize = compute_objsize (dest, 0);
5aef8938 3708
e6a18b5a 3709 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3710 srcsize, dstsize);
8d6c6ef5 3711}
3712
3713/* Validate memchr arguments without performing any expansion.
3714 Return NULL_RTX. */
3715
3716static rtx
3717expand_builtin_memchr (tree exp, rtx)
3718{
3719 if (!validate_arglist (exp,
3720 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3721 return NULL_RTX;
3722
3723 tree arg1 = CALL_EXPR_ARG (exp, 0);
3724 tree len = CALL_EXPR_ARG (exp, 2);
3725
3726 /* Diagnose calls where the specified length exceeds the size
3727 of the object. */
3728 if (warn_stringop_overflow)
3729 {
3730 tree size = compute_objsize (arg1, 0);
e6a18b5a 3731 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3732 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
8d6c6ef5 3733 }
3734
3735 return NULL_RTX;
5aef8938 3736}
3737
c2f47e15 3738/* Expand a call EXP to the memcpy builtin.
3739 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3740 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3741 mode MODE if that's convenient). */
c2f47e15 3742
53800dbe 3743static rtx
a65c4d64 3744expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3745{
c2f47e15 3746 if (!validate_arglist (exp,
3747 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3748 return NULL_RTX;
5aef8938 3749
3750 tree dest = CALL_EXPR_ARG (exp, 0);
3751 tree src = CALL_EXPR_ARG (exp, 1);
3752 tree len = CALL_EXPR_ARG (exp, 2);
3753
e6a18b5a 3754 check_memop_access (exp, dest, src, len);
5aef8938 3755
d0fbba1a 3756 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
02aa6d73 3757 /*retmode=*/ RETURN_BEGIN);
f21337ef 3758}
6840589f 3759
4d317237 3760/* Check a call EXP to the memmove built-in for validity.
3761 Return NULL_RTX on both success and failure. */
3762
3763static rtx
3764expand_builtin_memmove (tree exp, rtx)
3765{
3766 if (!validate_arglist (exp,
3767 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
3769
3770 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3771 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3772 tree len = CALL_EXPR_ARG (exp, 2);
3773
e6a18b5a 3774 check_memop_access (exp, dest, src, len);
4d317237 3775
3776 return NULL_RTX;
3777}
3778
c2f47e15 3779/* Expand a call EXP to the mempcpy builtin.
3780 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3781 otherwise try to get the result in TARGET, if convenient (and in
02aa6d73 3782 mode MODE if that's convenient). */
647661c6 3783
3784static rtx
d0fbba1a 3785expand_builtin_mempcpy (tree exp, rtx target)
647661c6 3786{
c2f47e15 3787 if (!validate_arglist (exp,
3788 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3789 return NULL_RTX;
5aef8938 3790
3791 tree dest = CALL_EXPR_ARG (exp, 0);
3792 tree src = CALL_EXPR_ARG (exp, 1);
3793 tree len = CALL_EXPR_ARG (exp, 2);
3794
24e3b821 3795 /* Policy does not generally allow using compute_objsize (which
3796 is used internally by check_memop_size) to change code generation
3797 or drive optimization decisions.
3798
3799 In this instance it is safe because the code we generate has
3800 the same semantics regardless of the return value of
3801 check_memop_sizes. Exactly the same amount of data is copied
3802 and the return value is exactly the same in both cases.
3803
3804 Furthermore, check_memop_size always uses mode 0 for the call to
3805 compute_objsize, so the imprecise nature of compute_objsize is
3806 avoided. */
3807
5aef8938 3808 /* Avoid expanding mempcpy into memcpy when the call is determined
3809 to overflow the buffer. This also prevents the same overflow
3810 from being diagnosed again when expanding memcpy. */
e6a18b5a 3811 if (!check_memop_access (exp, dest, src, len))
5aef8938 3812 return NULL_RTX;
3813
3814 return expand_builtin_mempcpy_args (dest, src, len,
02aa6d73 3815 target, exp, /*retmode=*/ RETURN_END);
f21337ef 3816}
3817
d0fbba1a 3818/* Helper function to do the actual work for expand of memory copy family
3819 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
02aa6d73 3820 of memory from SRC to DEST and assign to TARGET if convenient. Return
3821 value is based on RETMODE argument. */
c2f47e15 3822
static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may supply a better alignment/size estimate for
     the block operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done by
     pieces, we can avoid loading the string from memory and only
     store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (retmode == RETURN_END && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);
  /* pc_rtx signals that a mempcpy-style libcall would be needed but
     was suppressed (BLOCK_OP_NO_LIBCALL_RET); punt to the caller.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* Adjust the returned address per RETMODE: end of the copied block,
     or one before it for stpcpy-style callers.  */
  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3910
/* Expand a copy of LEN bytes from SRC to DEST on behalf of call
   ORIG_EXP, returning a value determined by RETMODE.  Thin wrapper
   forwarding directly to expand_builtin_memory_copy_args.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode);
}
3918
c2f47e15 3919/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3920 we failed, the caller should emit a normal call, otherwise try to
02aa6d73 3921 get the result in TARGET, if convenient.
3922 Return value is based on RETMODE argument. */
727c62dd 3923
3924static rtx
02aa6d73 3925expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
727c62dd 3926{
8786db1e 3927 struct expand_operand ops[3];
727c62dd 3928 rtx dest_mem;
3929 rtx src_mem;
727c62dd 3930
8d74dc42 3931 if (!targetm.have_movstr ())
c2f47e15 3932 return NULL_RTX;
727c62dd 3933
d8ae1baa 3934 dest_mem = get_memory_rtx (dest, NULL);
3935 src_mem = get_memory_rtx (src, NULL);
74a1cc74 3936 if (retmode == RETURN_BEGIN)
727c62dd 3937 {
3938 target = force_reg (Pmode, XEXP (dest_mem, 0));
3939 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3940 }
3941
24c9d723 3942 create_output_operand (&ops[0],
3943 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
8786db1e 3944 create_fixed_operand (&ops[1], dest_mem);
3945 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3946 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3947 return NULL_RTX;
727c62dd 3948
02aa6d73 3949 if (retmode != RETURN_BEGIN && target != const0_rtx)
c5aba89c 3950 {
8786db1e 3951 target = ops[0].value;
3952 /* movstr is supposed to set end to the address of the NUL
3953 terminator. If the caller requested a mempcpy-like return value,
3954 adjust it. */
02aa6d73 3955 if (retmode == RETURN_END)
8786db1e 3956 {
29c05e22 3957 rtx tem = plus_constant (GET_MODE (target),
3958 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3959 emit_move_insn (target, force_operand (tem, NULL_RTX));
3960 }
c5aba89c 3961 }
727c62dd 3962 return target;
3963}
3964
5aef8938 3965/* Do some very basic size validation of a call to the strcpy builtin
3966 given by EXP. Return NULL_RTX to have the built-in expand to a call
3967 to the library function. */
3968
3969static rtx
3970expand_builtin_strcat (tree exp, rtx)
3971{
3972 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3973 || !warn_stringop_overflow)
3974 return NULL_RTX;
3975
3976 tree dest = CALL_EXPR_ARG (exp, 0);
3977 tree src = CALL_EXPR_ARG (exp, 1);
3978
3979 /* There is no way here to determine the length of the string in
3980 the destination to which the SRC string is being appended so
3981 just diagnose cases when the souce string is longer than
3982 the destination object. */
3983
8d6c6ef5 3984 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3985
e6a18b5a 3986 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3987 destsize);
5aef8938 3988
3989 return NULL_RTX;
3990}
3991
48e1416a 3992/* Expand expression EXP, which is a call to the strcpy builtin. Return
3993 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3994 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3995 convenient). */
902de8ed 3996
53800dbe 3997static rtx
a65c4d64 3998expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3999{
5aef8938 4000 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4001 return NULL_RTX;
4002
4003 tree dest = CALL_EXPR_ARG (exp, 0);
4004 tree src = CALL_EXPR_ARG (exp, 1);
4005
4006 if (warn_stringop_overflow)
4007 {
8d6c6ef5 4008 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e6a18b5a 4009 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4010 src, destsize);
5aef8938 4011 }
4012
a788aa5f 4013 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
0b39ade8 4014 {
4015 /* Check to see if the argument was declared attribute nonstring
4016 and if so, issue a warning since at this point it's not known
4017 to be nul-terminated. */
4018 tree fndecl = get_callee_fndecl (exp);
4019 maybe_warn_nonstring_arg (fndecl, exp);
4020 return ret;
4021 }
4022
4023 return NULL_RTX;
c2f47e15 4024}
4025
4026/* Helper function to do the actual work for expand_builtin_strcpy. The
4027 arguments to the builtin_strcpy call DEST and SRC are broken out
4028 so that this can also be called without constructing an actual CALL_EXPR.
4029 The other arguments and return value are the same as for
4030 expand_builtin_strcpy. */
4031
4032static rtx
a788aa5f 4033expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
c2f47e15 4034{
a788aa5f 4035 /* Detect strcpy calls with unterminated arrays.. */
4036 if (tree nonstr = unterminated_array (src))
4037 {
4038 /* NONSTR refers to the non-nul terminated constant array. */
4039 if (!TREE_NO_WARNING (exp))
4040 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4041 return NULL_RTX;
4042 }
4043
02aa6d73 4044 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
53800dbe 4045}
4046
c2f47e15 4047/* Expand a call EXP to the stpcpy builtin.
4048 Return NULL_RTX if we failed the caller should emit a normal call,
3b824fa6 4049 otherwise try to get the result in TARGET, if convenient (and in
4050 mode MODE if that's convenient). */
4051
static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose writes past the end of the destination.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src, NULL)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* LENDATA.DECL is set when the string is missing its terminating
	 nul; warn unless warnings for EXP are suppressed.  */
      if (lendata.decl && !TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);

      /* Copy LEN + 1 bytes (including the nul) and return the address
	 of the last byte written, i.e. DST + LEN.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      /* Fall back to expanding as strcpy and computing the return
	 value as DST + LEN by hand when LEN is a constant.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the target's movstr pattern, if any.  */
      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
4137
df6e8b42 4138/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4139 arguments while being careful to avoid duplicate warnings (which could
4140 be issued if the expander were to expand the call, resulting in it
4141 being emitted in expand_call(). */
4142
4143static rtx
4144expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4145{
4146 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4147 {
4148 /* The call has been successfully expanded. Check for nonstring
4149 arguments and issue warnings as appropriate. */
4150 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4151 return ret;
4152 }
4153
4154 return NULL_RTX;
4155}
4156
4d317237 4157/* Check a call EXP to the stpncpy built-in for validity.
4158 Return NULL_RTX on both success and failure. */
4159
4160static rtx
4161expand_builtin_stpncpy (tree exp, rtx)
4162{
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4165 || !warn_stringop_overflow)
4166 return NULL_RTX;
4167
aca1a787 4168 /* The source and destination of the call. */
4d317237 4169 tree dest = CALL_EXPR_ARG (exp, 0);
4170 tree src = CALL_EXPR_ARG (exp, 1);
4171
aca1a787 4172 /* The exact number of bytes to write (not the maximum). */
4d317237 4173 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 4174
aca1a787 4175 /* The size of the destination object. */
8d6c6ef5 4176 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 4177
e6a18b5a 4178 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4d317237 4179
4180 return NULL_RTX;
4181}
4182
6840589f 4183/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4184 bytes from constant string DATA + OFFSET and return it as target
4185 constant. */
4186
09879952 4187rtx
aecda0d6 4188builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 4189 scalar_int_mode mode)
6840589f 4190{
4191 const char *str = (const char *) data;
4192
4193 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4194 return const0_rtx;
4195
4196 return c_readstr (str + offset, mode);
4197}
4198
5aef8938 4199/* Helper to check the sizes of sequences and the destination of calls
4200 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4201 success (no overflow or invalid sizes), false otherwise. */
4202
static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The bound argument: the maximum number of bytes to append.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or when the bound is known to
     be smaller, check against the bound instead.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4259
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  /* Only the diagnostics below are done here; without the warning
     option there is nothing to do.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      /* Point the diagnostic at the macro expansion point when the call
	 comes from a system header.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* Use the bound as the length when the source length is unknown or
     when the bound is the smaller of the two.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4325
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  On
   success the result is the DEST pointer in TARGET (converted to
   ptr_mode).  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
	{
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_access will also
	     check SLEN if LEN's value isn't known.  */
	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
			destsize);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes the copy of
	 the string itself occupies including its terminating nul.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  /* builtin_strncpy_read_str returns zero past the end of P,
	     which provides the required zero padding.  */
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false,
			   RETURN_BEGIN);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
4389
ecc318ff 4390/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4391 bytes from constant string DATA + OFFSET and return it as target
4392 constant. */
4393
f656b751 4394rtx
aecda0d6 4395builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4396 scalar_int_mode mode)
ecc318ff 4397{
4398 const char *c = (const char *) data;
364c0c59 4399 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 4400
4401 memset (p, *c, GET_MODE_SIZE (mode));
4402
4403 return c_readstr (p, mode);
4404}
4405
a7ec6974 4406/* Callback routine for store_by_pieces. Return the RTL of a register
4407 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4408 char value given in the RTL register data. For example, if mode is
4409 4 bytes wide, return the RTL for 0x01010101*data. */
4410
4411static rtx
aecda0d6 4412builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4413 scalar_int_mode mode)
a7ec6974 4414{
4415 rtx target, coeff;
4416 size_t size;
4417 char *p;
4418
4419 size = GET_MODE_SIZE (mode);
f0ce3b1f 4420 if (size == 1)
4421 return (rtx) data;
a7ec6974 4422
364c0c59 4423 p = XALLOCAVEC (char, size);
a7ec6974 4424 memset (p, 1, size);
4425 coeff = c_readstr (p, mode);
4426
f0ce3b1f 4427 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 4428 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4429 return force_reg (mode, target);
4430}
4431
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose writes of LEN bytes past the end of the destination.  */
  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
53800dbe 4452
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used for its
   location, tail-call flag, and callee when falling back to a libcall.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-based alignment/size hints, if available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2: a known nonzero fill byte.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: fill byte is zero; use the block-clear machinery.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Emit a library call to the function named by ORIG_EXP — memset or
     bzero — rebuilt with the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4594
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* Diagnose writes of SIZE bytes past the end of the destination.  */
  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
4621
d6f01a40 4622/* Try to expand cmpstr operation ICODE with the given operands.
4623 Return the result rtx on success, otherwise return null. */
4624
4625static rtx
4626expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4627 HOST_WIDE_INT align)
4628{
4629 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4630
4631 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4632 target = NULL_RTX;
4633
4634 struct expand_operand ops[4];
4635 create_output_operand (&ops[0], target, insn_mode);
4636 create_fixed_operand (&ops[1], arg1_rtx);
4637 create_fixed_operand (&ops[2], arg2_rtx);
4638 create_integer_operand (&ops[3], align);
4639 if (maybe_expand_insn (icode, 4, ops))
4640 return ops[0].value;
4641 return NULL_RTX;
4642}
4643
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len, /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* Prefer to have the constant-string operand second; for an equality
     comparison the operands may be swapped freely.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4754
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* Without a cmpstr/cmpstrn insn pattern there is nothing further we
     can do inline.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
53800dbe 4871
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* Account for the terminating nul in each known length.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4984
/* Expand a call to __builtin_saveregs.  The result rtx is cached in
   saveregs_value, so the register-save sequence is emitted at most once
   per function.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
5022
/* Expand a call to __builtin_next_arg: compute, in ptr_mode, the sum of
   the incoming argument pointer and the offset of the first anonymous
   argument.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
5035
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the new
   trees; NEEDS_LVALUE is nonzero when the caller requires an lvalue
   (e.g. for va_start/va_end which modify the list).  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-materialize the value as a dereference of the stabilized
	 address.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
5085
2e15d750 5086/* The "standard" definition of va_list is void*. */
5087
5088tree
5089std_build_builtin_va_list (void)
5090{
5091 return ptr_type_node;
5092}
5093
5f57a8b1 5094/* The "standard" abi va_list is va_list_type_node. */
5095
5096tree
5097std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5098{
5099 return va_list_type_node;
5100}
5101
5102/* The "standard" type of va_list is va_list_type_node. */
5103
5104tree
5105std_canonical_va_list_type (tree type)
5106{
5107 tree wtype, htype;
5108
5f57a8b1 5109 wtype = va_list_type_node;
5110 htype = type;
b6da2e41 5111
5112 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 5113 {
5114 /* If va_list is an array type, the argument may have decayed
5115 to a pointer type, e.g. by being passed to another function.
5116 In that case, unwrap both types so that we can compare the
5117 underlying records. */
5118 if (TREE_CODE (htype) == ARRAY_TYPE
5119 || POINTER_TYPE_P (htype))
5120 {
5121 wtype = TREE_TYPE (wtype);
5122 htype = TREE_TYPE (htype);
5123 }
5124 }
5125 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5126 return va_list_type_node;
5127
5128 return NULL_TREE;
5129}
5130
a66c9326 5131/* The "standard" implementation of va_start: just assign `nextarg' to
5132 the variable. */
27d0c333 5133
a66c9326 5134void
aecda0d6 5135std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 5136{
f03c17bc 5137 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5138 convert_move (va_r, nextarg, 0);
a66c9326 5139}
5140
c2f47e15 5141/* Expand EXP, a call to __builtin_va_start. */
27d0c333 5142
a66c9326 5143static rtx
c2f47e15 5144expand_builtin_va_start (tree exp)
a66c9326 5145{
5146 rtx nextarg;
c2f47e15 5147 tree valist;
389dd41b 5148 location_t loc = EXPR_LOCATION (exp);
a66c9326 5149
c2f47e15 5150 if (call_expr_nargs (exp) < 2)
cb166087 5151 {
389dd41b 5152 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 5153 return const0_rtx;
5154 }
a66c9326 5155
c2f47e15 5156 if (fold_builtin_next_arg (exp, true))
79012a9d 5157 return const0_rtx;
7c2f0500 5158
79012a9d 5159 nextarg = expand_builtin_next_arg ();
389dd41b 5160 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 5161
8a58ed0a 5162 if (targetm.expand_builtin_va_start)
5163 targetm.expand_builtin_va_start (valist, nextarg);
5164 else
5165 std_expand_builtin_va_start (valist, nextarg);
a66c9326 5166
5167 return const0_rtx;
5168}
5169
c2f47e15 5170/* Expand EXP, a call to __builtin_va_end. */
f7c44134 5171
a66c9326 5172static rtx
c2f47e15 5173expand_builtin_va_end (tree exp)
a66c9326 5174{
c2f47e15 5175 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 5176
8a15c04a 5177 /* Evaluate for side effects, if needed. I hate macros that don't
5178 do that. */
5179 if (TREE_SIDE_EFFECTS (valist))
5180 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 5181
5182 return const0_rtx;
5183}
5184
c2f47e15 5185/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 5186 builtin rather than just as an assignment in stdarg.h because of the
5187 nastiness of array-type va_list types. */
f7c44134 5188
a66c9326 5189static rtx
c2f47e15 5190expand_builtin_va_copy (tree exp)
a66c9326 5191{
5192 tree dst, src, t;
389dd41b 5193 location_t loc = EXPR_LOCATION (exp);
a66c9326 5194
c2f47e15 5195 dst = CALL_EXPR_ARG (exp, 0);
5196 src = CALL_EXPR_ARG (exp, 1);
a66c9326 5197
389dd41b 5198 dst = stabilize_va_list_loc (loc, dst, 1);
5199 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 5200
5f57a8b1 5201 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5202
5203 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 5204 {
5f57a8b1 5205 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 5206 TREE_SIDE_EFFECTS (t) = 1;
5207 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5208 }
5209 else
5210 {
11a61dea 5211 rtx dstb, srcb, size;
5212
5213 /* Evaluate to pointers. */
5214 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5215 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 5216 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5217 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 5218
85d654dd 5219 dstb = convert_memory_address (Pmode, dstb);
5220 srcb = convert_memory_address (Pmode, srcb);
726ec87c 5221
11a61dea 5222 /* "Dereference" to BLKmode memories. */
5223 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 5224 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 5225 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 5226 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 5227 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 5228 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 5229
5230 /* Copy. */
0378dbdc 5231 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 5232 }
5233
5234 return const0_rtx;
5235}
5236
53800dbe 5237/* Expand a call to one of the builtin functions __builtin_frame_address or
5238 __builtin_return_address. */
27d0c333 5239
53800dbe 5240static rtx
c2f47e15 5241expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 5242{
53800dbe 5243 /* The argument must be a nonnegative integer constant.
5244 It counts the number of frames to scan up the stack.
5b252e95 5245 The value is either the frame pointer value or the return
5246 address saved in that frame. */
c2f47e15 5247 if (call_expr_nargs (exp) == 0)
53800dbe 5248 /* Warning about missing arg was already issued. */
5249 return const0_rtx;
e913b5cd 5250 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 5251 {
5b252e95 5252 error ("invalid argument to %qD", fndecl);
53800dbe 5253 return const0_rtx;
5254 }
5255 else
5256 {
5b252e95 5257 /* Number of frames to scan up the stack. */
5258 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5259
5260 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 5261
5262 /* Some ports cannot access arbitrary stack frames. */
5263 if (tem == NULL)
5264 {
5b252e95 5265 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 5266 return const0_rtx;
5267 }
5268
5b252e95 5269 if (count)
5270 {
5271 /* Warn since no effort is made to ensure that any frame
5272 beyond the current one exists or can be safely reached. */
5273 warning (OPT_Wframe_address, "calling %qD with "
5274 "a nonzero argument is unsafe", fndecl);
5275 }
5276
53800dbe 5277 /* For __builtin_frame_address, return what we've got. */
5278 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5279 return tem;
5280
8ad4c111 5281 if (!REG_P (tem)
53800dbe 5282 && ! CONSTANT_P (tem))
99182918 5283 tem = copy_addr_to_reg (tem);
53800dbe 5284 return tem;
5285 }
5286}
5287
990495a7 5288/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
2b29cc6a 5289 failed and the caller should emit a normal call. */
15c6cf6b 5290
53800dbe 5291static rtx
2b29cc6a 5292expand_builtin_alloca (tree exp)
53800dbe 5293{
5294 rtx op0;
15c6cf6b 5295 rtx result;
581bf1c2 5296 unsigned int align;
370e45b9 5297 tree fndecl = get_callee_fndecl (exp);
2b34677f 5298 HOST_WIDE_INT max_size;
5299 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
2b29cc6a 5300 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
370e45b9 5301 bool valid_arglist
2b34677f 5302 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5303 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5304 VOID_TYPE)
5305 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5306 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5307 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
581bf1c2 5308
5309 if (!valid_arglist)
c2f47e15 5310 return NULL_RTX;
53800dbe 5311
8e18705e 5312 if ((alloca_for_var
5313 && warn_vla_limit >= HOST_WIDE_INT_MAX
5314 && warn_alloc_size_limit < warn_vla_limit)
5315 || (!alloca_for_var
5316 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5317 && warn_alloc_size_limit < warn_alloca_limit
5318 ))
370e45b9 5319 {
8e18705e 5320 /* -Walloca-larger-than and -Wvla-larger-than settings of
5321 less than HOST_WIDE_INT_MAX override the more general
5322 -Walloc-size-larger-than so unless either of the former
5323 options is smaller than the last one (wchich would imply
5324 that the call was already checked), check the alloca
5325 arguments for overflow. */
370e45b9 5326 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5327 int idx[] = { 0, -1 };
5328 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5329 }
5330
53800dbe 5331 /* Compute the argument. */
c2f47e15 5332 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 5333
581bf1c2 5334 /* Compute the alignment. */
2b34677f 5335 align = (fcode == BUILT_IN_ALLOCA
5336 ? BIGGEST_ALIGNMENT
5337 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5338
5339 /* Compute the maximum size. */
5340 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5341 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5342 : -1);
581bf1c2 5343
2b29cc6a 5344 /* Allocate the desired space. If the allocation stems from the declaration
5345 of a variable-sized object, it cannot accumulate. */
2b34677f 5346 result
5347 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
85d654dd 5348 result = convert_memory_address (ptr_mode, result);
15c6cf6b 5349
5350 return result;
53800dbe 5351}
5352
829e6a9b 5353/* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5354 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5355 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5356 handle_builtin_stack_restore function. */
d08919a7 5357
5358static rtx
5359expand_asan_emit_allocas_unpoison (tree exp)
5360{
5361 tree arg0 = CALL_EXPR_ARG (exp, 0);
829e6a9b 5362 tree arg1 = CALL_EXPR_ARG (exp, 1);
cd2ee6ee 5363 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
829e6a9b 5364 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5365 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5366 stack_pointer_rtx, NULL_RTX, 0,
5367 OPTAB_LIB_WIDEN);
5368 off = convert_modes (ptr_mode, Pmode, off, 0);
5369 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5370 OPTAB_LIB_WIDEN);
d08919a7 5371 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
9e9e5c15 5372 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5373 top, ptr_mode, bot, ptr_mode);
d08919a7 5374 return ret;
5375}
5376
74bdbe96 5377/* Expand a call to bswap builtin in EXP.
5378 Return NULL_RTX if a normal call should be emitted rather than expanding the
5379 function in-line. If convenient, the result should be placed in TARGET.
5380 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 5381
5382static rtx
3754d046 5383expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 5384 rtx subtarget)
42791117 5385{
42791117 5386 tree arg;
5387 rtx op0;
5388
c2f47e15 5389 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5390 return NULL_RTX;
42791117 5391
c2f47e15 5392 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 5393 op0 = expand_expr (arg,
5394 subtarget && GET_MODE (subtarget) == target_mode
5395 ? subtarget : NULL_RTX,
5396 target_mode, EXPAND_NORMAL);
5397 if (GET_MODE (op0) != target_mode)
5398 op0 = convert_to_mode (target_mode, op0, 1);
42791117 5399
74bdbe96 5400 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 5401
5402 gcc_assert (target);
5403
74bdbe96 5404 return convert_to_mode (target_mode, target, 1);
42791117 5405}
5406
c2f47e15 5407/* Expand a call to a unary builtin in EXP.
5408 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 5409 function in-line. If convenient, the result should be placed in TARGET.
5410 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 5411
53800dbe 5412static rtx
3754d046 5413expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 5414 rtx subtarget, optab op_optab)
53800dbe 5415{
5416 rtx op0;
c2f47e15 5417
5418 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5419 return NULL_RTX;
53800dbe 5420
5421 /* Compute the argument. */
f97eea22 5422 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5423 (subtarget
5424 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5425 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 5426 VOIDmode, EXPAND_NORMAL);
6a08d0ab 5427 /* Compute op, into TARGET if possible.
53800dbe 5428 Set TARGET to wherever the result comes back. */
c2f47e15 5429 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 5430 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 5431 gcc_assert (target);
7d3f6cc7 5432
efb070c8 5433 return convert_to_mode (target_mode, target, 0);
53800dbe 5434}
89cfe6e5 5435
48e1416a 5436/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 5437 as the builtin_expect semantic should've been already executed by
5438 tree branch prediction pass. */
89cfe6e5 5439
5440static rtx
c2f47e15 5441expand_builtin_expect (tree exp, rtx target)
89cfe6e5 5442{
1e4adcfc 5443 tree arg;
89cfe6e5 5444
c2f47e15 5445 if (call_expr_nargs (exp) < 2)
89cfe6e5 5446 return const0_rtx;
c2f47e15 5447 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 5448
c2f47e15 5449 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 5450 /* When guessing was done, the hints should be already stripped away. */
07311427 5451 gcc_assert (!flag_guess_branch_prob
852f689e 5452 || optimize == 0 || seen_error ());
89cfe6e5 5453 return target;
5454}
689df48e 5455
01107f42 5456/* Expand a call to __builtin_expect_with_probability. We just return our
5457 argument as the builtin_expect semantic should've been already executed by
5458 tree branch prediction pass. */
5459
5460static rtx
5461expand_builtin_expect_with_probability (tree exp, rtx target)
5462{
5463 tree arg;
5464
5465 if (call_expr_nargs (exp) < 3)
5466 return const0_rtx;
5467 arg = CALL_EXPR_ARG (exp, 0);
5468
5469 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5470 /* When guessing was done, the hints should be already stripped away. */
5471 gcc_assert (!flag_guess_branch_prob
5472 || optimize == 0 || seen_error ());
5473 return target;
5474}
5475
5476
fca0886c 5477/* Expand a call to __builtin_assume_aligned. We just return our first
5478 argument as the builtin_assume_aligned semantic should've been already
5479 executed by CCP. */
5480
5481static rtx
5482expand_builtin_assume_aligned (tree exp, rtx target)
5483{
5484 if (call_expr_nargs (exp) < 2)
5485 return const0_rtx;
5486 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5487 EXPAND_NORMAL);
5488 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5489 && (call_expr_nargs (exp) < 3
5490 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5491 return target;
5492}
5493
c22de3f0 5494void
aecda0d6 5495expand_builtin_trap (void)
a0ef1725 5496{
4db8dd0c 5497 if (targetm.have_trap ())
f73960eb 5498 {
4db8dd0c 5499 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 5500 /* For trap insns when not accumulating outgoing args force
5501 REG_ARGS_SIZE note to prevent crossjumping of calls with
5502 different args sizes. */
5503 if (!ACCUMULATE_OUTGOING_ARGS)
f6a1fc98 5504 add_args_size_note (insn, stack_pointer_delta);
f73960eb 5505 }
a0ef1725 5506 else
61ffc71a 5507 {
5508 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5509 tree call_expr = build_call_expr (fn, 0);
5510 expand_call (call_expr, NULL_RTX, false);
5511 }
5512
a0ef1725 5513 emit_barrier ();
5514}
78a74442 5515
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5526
c2f47e15 5527/* Expand EXP, a call to fabs, fabsf or fabsl.
5528 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 5529 the function inline. If convenient, the result should be placed
5530 in TARGET. SUBTARGET may be used as the target for computing
5531 the operand. */
5532
5533static rtx
c2f47e15 5534expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 5535{
3754d046 5536 machine_mode mode;
78a74442 5537 tree arg;
5538 rtx op0;
5539
c2f47e15 5540 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5541 return NULL_RTX;
78a74442 5542
c2f47e15 5543 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 5544 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 5545 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 5546 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 5547 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5548}
5549
c2f47e15 5550/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5551 Return NULL is a normal call should be emitted rather than expanding the
5552 function inline. If convenient, the result should be placed in TARGET.
5553 SUBTARGET may be used as the target for computing the operand. */
5554
5555static rtx
c2f47e15 5556expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5557{
5558 rtx op0, op1;
5559 tree arg;
5560
c2f47e15 5561 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5562 return NULL_RTX;
270436f3 5563
c2f47e15 5564 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5565 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5566
c2f47e15 5567 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5568 op1 = expand_normal (arg);
270436f3 5569
5570 return expand_copysign (op0, op1, target);
5571}
5572
ac8fb6db 5573/* Expand a call to __builtin___clear_cache. */
5574
5575static rtx
32e17df0 5576expand_builtin___clear_cache (tree exp)
ac8fb6db 5577{
32e17df0 5578 if (!targetm.code_for_clear_cache)
5579 {
ac8fb6db 5580#ifdef CLEAR_INSN_CACHE
32e17df0 5581 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5582 does something. Just do the default expansion to a call to
5583 __clear_cache(). */
5584 return NULL_RTX;
ac8fb6db 5585#else
32e17df0 5586 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5587 does nothing. There is no need to call it. Do nothing. */
5588 return const0_rtx;
ac8fb6db 5589#endif /* CLEAR_INSN_CACHE */
32e17df0 5590 }
5591
ac8fb6db 5592 /* We have a "clear_cache" insn, and it will handle everything. */
5593 tree begin, end;
5594 rtx begin_rtx, end_rtx;
ac8fb6db 5595
5596 /* We must not expand to a library call. If we did, any
5597 fallback library function in libgcc that might contain a call to
5598 __builtin___clear_cache() would recurse infinitely. */
5599 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5600 {
5601 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5602 return const0_rtx;
5603 }
5604
32e17df0 5605 if (targetm.have_clear_cache ())
ac8fb6db 5606 {
8786db1e 5607 struct expand_operand ops[2];
ac8fb6db 5608
5609 begin = CALL_EXPR_ARG (exp, 0);
5610 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5611
5612 end = CALL_EXPR_ARG (exp, 1);
5613 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5614
8786db1e 5615 create_address_operand (&ops[0], begin_rtx);
5616 create_address_operand (&ops[1], end_rtx);
32e17df0 5617 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 5618 return const0_rtx;
ac8fb6db 5619 }
5620 return const0_rtx;
ac8fb6db 5621}
5622
4ee9c684 5623/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5624
5625static rtx
5626round_trampoline_addr (rtx tramp)
5627{
5628 rtx temp, addend, mask;
5629
5630 /* If we don't need too much alignment, we'll have been guaranteed
5631 proper alignment by get_trampoline_type. */
5632 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5633 return tramp;
5634
5635 /* Round address up to desired boundary. */
5636 temp = gen_reg_rtx (Pmode);
0359f9f5 5637 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5638 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5639
5640 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5641 temp, 0, OPTAB_LIB_WIDEN);
5642 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5643 temp, 0, OPTAB_LIB_WIDEN);
5644
5645 return tramp;
5646}
5647
5648static rtx
c307f106 5649expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 5650{
5651 tree t_tramp, t_func, t_chain;
82c7907c 5652 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 5653
c2f47e15 5654 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 5655 POINTER_TYPE, VOID_TYPE))
5656 return NULL_RTX;
5657
c2f47e15 5658 t_tramp = CALL_EXPR_ARG (exp, 0);
5659 t_func = CALL_EXPR_ARG (exp, 1);
5660 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 5661
8ec3c5c2 5662 r_tramp = expand_normal (t_tramp);
82c7907c 5663 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5664 MEM_NOTRAP_P (m_tramp) = 1;
5665
c307f106 5666 /* If ONSTACK, the TRAMP argument should be the address of a field
5667 within the local function's FRAME decl. Either way, let's see if
5668 we can fill in the MEM_ATTRs for this memory. */
82c7907c 5669 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 5670 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 5671
c307f106 5672 /* Creator of a heap trampoline is responsible for making sure the
5673 address is aligned to at least STACK_BOUNDARY. Normally malloc
5674 will ensure this anyhow. */
82c7907c 5675 tmp = round_trampoline_addr (r_tramp);
5676 if (tmp != r_tramp)
5677 {
5678 m_tramp = change_address (m_tramp, BLKmode, tmp);
5679 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 5680 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 5681 }
5682
5683 /* The FUNC argument should be the address of the nested function.
5684 Extract the actual function decl to pass to the hook. */
5685 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5686 t_func = TREE_OPERAND (t_func, 0);
5687 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5688
8ec3c5c2 5689 r_chain = expand_normal (t_chain);
4ee9c684 5690
5691 /* Generate insns to initialize the trampoline. */
82c7907c 5692 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 5693
c307f106 5694 if (onstack)
5695 {
5696 trampolines_created = 1;
8bc8a8f4 5697
a27e3913 5698 if (targetm.calls.custom_function_descriptors != 0)
5699 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5700 "trampoline generated for nested function %qD", t_func);
c307f106 5701 }
8bc8a8f4 5702
4ee9c684 5703 return const0_rtx;
5704}
5705
5706static rtx
c2f47e15 5707expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5708{
5709 rtx tramp;
5710
c2f47e15 5711 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5712 return NULL_RTX;
5713
c2f47e15 5714 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5715 tramp = round_trampoline_addr (tramp);
82c7907c 5716 if (targetm.calls.trampoline_adjust_address)
5717 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5718
5719 return tramp;
5720}
5721
a27e3913 5722/* Expand a call to the builtin descriptor initialization routine.
5723 A descriptor is made up of a couple of pointers to the static
5724 chain and the code entry in this order. */
5725
5726static rtx
5727expand_builtin_init_descriptor (tree exp)
5728{
5729 tree t_descr, t_func, t_chain;
5730 rtx m_descr, r_descr, r_func, r_chain;
5731
5732 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5733 VOID_TYPE))
5734 return NULL_RTX;
5735
5736 t_descr = CALL_EXPR_ARG (exp, 0);
5737 t_func = CALL_EXPR_ARG (exp, 1);
5738 t_chain = CALL_EXPR_ARG (exp, 2);
5739
5740 r_descr = expand_normal (t_descr);
5741 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5742 MEM_NOTRAP_P (m_descr) = 1;
5743
5744 r_func = expand_normal (t_func);
5745 r_chain = expand_normal (t_chain);
5746
5747 /* Generate insns to initialize the descriptor. */
5748 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5749 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5750 POINTER_SIZE / BITS_PER_UNIT), r_func);
5751
5752 return const0_rtx;
5753}
5754
5755/* Expand a call to the builtin descriptor adjustment routine. */
5756
5757static rtx
5758expand_builtin_adjust_descriptor (tree exp)
5759{
5760 rtx tramp;
5761
5762 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5763 return NULL_RTX;
5764
5765 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5766
5767 /* Unalign the descriptor to allow runtime identification. */
5768 tramp = plus_constant (ptr_mode, tramp,
5769 targetm.calls.custom_function_descriptors);
5770
5771 return force_operand (tramp, NULL_RTX);
5772}
5773
93f564d6 5774/* Expand the call EXP to the built-in signbit, signbitf or signbitl
5775 function. The function first checks whether the back end provides
5776 an insn to implement signbit for the respective mode. If not, it
5777 checks whether the floating point format of the value is such that
10902624 5778 the sign bit can be extracted. If that is not the case, error out.
5779 EXP is the expression that is a call to the builtin function; if
5780 convenient, the result should be placed in TARGET. */
27f261ef 5781static rtx
5782expand_builtin_signbit (tree exp, rtx target)
5783{
5784 const struct real_format *fmt;
299dd9fa 5785 scalar_float_mode fmode;
f77c4496 5786 scalar_int_mode rmode, imode;
c2f47e15 5787 tree arg;
ca4f1f5b 5788 int word, bitpos;
27eda240 5789 enum insn_code icode;
27f261ef 5790 rtx temp;
389dd41b 5791 location_t loc = EXPR_LOCATION (exp);
27f261ef 5792
c2f47e15 5793 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5794 return NULL_RTX;
27f261ef 5795
c2f47e15 5796 arg = CALL_EXPR_ARG (exp, 0);
299dd9fa 5797 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
03b7a719 5798 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
27f261ef 5799 fmt = REAL_MODE_FORMAT (fmode);
5800
93f564d6 5801 arg = builtin_save_expr (arg);
5802
5803 /* Expand the argument yielding a RTX expression. */
5804 temp = expand_normal (arg);
5805
5806 /* Check if the back end provides an insn that handles signbit for the
5807 argument's mode. */
d6bf3b14 5808 icode = optab_handler (signbit_optab, fmode);
27eda240 5809 if (icode != CODE_FOR_nothing)
93f564d6 5810 {
1e0c0b35 5811 rtx_insn *last = get_last_insn ();
93f564d6 5812 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 5813 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5814 return target;
5815 delete_insns_since (last);
93f564d6 5816 }
5817
27f261ef 5818 /* For floating point formats without a sign bit, implement signbit
5819 as "ARG < 0.0". */
8d564692 5820 bitpos = fmt->signbit_ro;
ca4f1f5b 5821 if (bitpos < 0)
27f261ef 5822 {
5823 /* But we can't do this if the format supports signed zero. */
10902624 5824 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 5825
389dd41b 5826 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5827 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5828 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5829 }
5830
ca4f1f5b 5831 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5832 {
2cf1bb25 5833 imode = int_mode_for_mode (fmode).require ();
ca4f1f5b 5834 temp = gen_lowpart (imode, temp);
24fd4260 5835 }
5836 else
5837 {
ca4f1f5b 5838 imode = word_mode;
5839 /* Handle targets with different FP word orders. */
5840 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5841 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5842 else
a0c938f0 5843 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5844 temp = operand_subword_force (temp, word, fmode);
5845 bitpos = bitpos % BITS_PER_WORD;
5846 }
5847
44b0f1d0 5848 /* Force the intermediate word_mode (or narrower) result into a
5849 register. This avoids attempting to create paradoxical SUBREGs
5850 of floating point modes below. */
5851 temp = force_reg (imode, temp);
5852
ca4f1f5b 5853 /* If the bitpos is within the "result mode" lowpart, the operation
5854 can be implement with a single bitwise AND. Otherwise, we need
5855 a right shift and an AND. */
5856
5857 if (bitpos < GET_MODE_BITSIZE (rmode))
5858 {
796b6678 5859 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5860
4a46f016 5861 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5862 temp = gen_lowpart (rmode, temp);
24fd4260 5863 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5864 immed_wide_int_const (mask, rmode),
ca4f1f5b 5865 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5866 }
ca4f1f5b 5867 else
5868 {
5869 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5870 significant bit, then truncate the result to the desired mode
ca4f1f5b 5871 and mask just this bit. */
f5ff0b21 5872 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5873 temp = gen_lowpart (rmode, temp);
5874 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5875 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5876 }
5877
27f261ef 5878 return temp;
5879}
73673831 5880
5881/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5882 call. EXP is the call. FN is the
73673831 5883 identificator of the actual function. IGNORE is nonzero if the
5884 value is to be ignored. */
5885
5886static rtx
c2f47e15 5887expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5888{
5889 tree id, decl;
5890 tree call;
5891
5892 /* If we are not profiling, just call the function. */
5893 if (!profile_arc_flag)
5894 return NULL_RTX;
5895
5896 /* Otherwise call the wrapper. This should be equivalent for the rest of
5897 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5898 code necessary for keeping the profiling sane. */
73673831 5899
5900 switch (DECL_FUNCTION_CODE (fn))
5901 {
5902 case BUILT_IN_FORK:
5903 id = get_identifier ("__gcov_fork");
5904 break;
5905
5906 case BUILT_IN_EXECL:
5907 id = get_identifier ("__gcov_execl");
5908 break;
5909
5910 case BUILT_IN_EXECV:
5911 id = get_identifier ("__gcov_execv");
5912 break;
5913
5914 case BUILT_IN_EXECLP:
5915 id = get_identifier ("__gcov_execlp");
5916 break;
5917
5918 case BUILT_IN_EXECLE:
5919 id = get_identifier ("__gcov_execle");
5920 break;
5921
5922 case BUILT_IN_EXECVP:
5923 id = get_identifier ("__gcov_execvp");
5924 break;
5925
5926 case BUILT_IN_EXECVE:
5927 id = get_identifier ("__gcov_execve");
5928 break;
5929
5930 default:
64db345d 5931 gcc_unreachable ();
73673831 5932 }
5933
e60a6f7b 5934 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5935 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5936 DECL_EXTERNAL (decl) = 1;
5937 TREE_PUBLIC (decl) = 1;
5938 DECL_ARTIFICIAL (decl) = 1;
5939 TREE_NOTHROW (decl) = 1;
e82d310b 5940 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5941 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5942 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5943 return expand_call (call, target, ignore);
c2f47e15 5944 }
48e1416a 5945
b6a5fc45 5946
5947\f
3e272de8 5948/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5949 the pointer in these functions is void*, the tree optimizers may remove
5950 casts. The mode computed in expand_builtin isn't reliable either, due
5951 to __sync_bool_compare_and_swap.
5952
5953 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5954 group of builtins. This gives us log2 of the mode size. */
5955
3754d046 5956static inline machine_mode
3e272de8 5957get_builtin_sync_mode (int fcode_diff)
5958{
ad3a13b5 5959 /* The size is not negotiable, so ask not to get BLKmode in return
5960 if the target indicates that a smaller size would be better. */
517be012 5961 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
3e272de8 5962}
5963
041e0215 5964/* Expand the memory expression LOC and return the appropriate memory operand
5965 for the builtin_sync operations. */
5966
5967static rtx
3754d046 5968get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5969{
5970 rtx addr, mem;
fcbc2234 5971 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5972 ? TREE_TYPE (TREE_TYPE (loc))
5973 : TREE_TYPE (loc));
5974 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
041e0215 5975
fcbc2234 5976 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
ed825d83 5977 addr = convert_memory_address (addr_mode, addr);
041e0215 5978
5979 /* Note that we explicitly do not want any alias information for this
5980 memory, so that we kill all other live memories. Otherwise we don't
5981 satisfy the full barrier semantics of the intrinsic. */
fcbc2234 5982 mem = gen_rtx_MEM (mode, addr);
5983
5984 set_mem_addr_space (mem, addr_space);
5985
5986 mem = validize_mem (mem);
041e0215 5987
153c3b50 5988 /* The alignment needs to be at least according to that of the mode. */
5989 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5990 get_pointer_alignment (loc)));
c94cfd1c 5991 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5992 MEM_VOLATILE_P (mem) = 1;
5993
5994 return mem;
5995}
5996
1cd6e20d 5997/* Make sure an argument is in the right mode.
5998 EXP is the tree argument.
5999 MODE is the mode it should be in. */
6000
6001static rtx
3754d046 6002expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 6003{
6004 rtx val;
3754d046 6005 machine_mode old_mode;
1cd6e20d 6006
6007 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6008 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6009 of CONST_INTs, where we know the old_mode only from the call argument. */
6010
6011 old_mode = GET_MODE (val);
6012 if (old_mode == VOIDmode)
6013 old_mode = TYPE_MODE (TREE_TYPE (exp));
6014 val = convert_modes (mode, old_mode, val, 1);
6015 return val;
6016}
6017
6018
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The semantics of __sync_fetch_and_nand/__sync_nand_and_fetch changed
     in GCC 4.4; optionally remind the user, once per TU per form.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* One flag per form so each diagnostic is emitted at most once.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The legacy __sync builtins always imply the strongest barrier.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
6081
6082/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 6083 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 6084 true if this is the boolean form. TARGET is a place for us to store the
6085 results; this is NOT optional if IS_BOOL is true. */
6086
6087static rtx
3754d046 6088expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 6089 bool is_bool, rtx target)
b6a5fc45 6090{
041e0215 6091 rtx old_val, new_val, mem;
ba885f6a 6092 rtx *pbool, *poval;
b6a5fc45 6093
6094 /* Expand the operands. */
c2f47e15 6095 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 6096 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6097 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 6098
ba885f6a 6099 pbool = poval = NULL;
6100 if (target != const0_rtx)
6101 {
6102 if (is_bool)
6103 pbool = &target;
6104 else
6105 poval = &target;
6106 }
6107 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 6108 false, MEMMODEL_SYNC_SEQ_CST,
6109 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 6110 return NULL_RTX;
c2f47e15 6111
1cd6e20d 6112 return target;
b6a5fc45 6113}
6114
6115/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6116 general form is actually an atomic exchange, and some targets only
6117 support a reduced form with the second argument being a constant 1.
48e1416a 6118 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 6119 the results. */
b6a5fc45 6120
6121static rtx
3754d046 6122expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 6123 rtx target)
b6a5fc45 6124{
041e0215 6125 rtx val, mem;
b6a5fc45 6126
6127 /* Expand the operands. */
c2f47e15 6128 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 6129 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6130
7821cde1 6131 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 6132}
6133
6134/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6135
6136static void
3754d046 6137expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 6138{
6139 rtx mem;
6140
6141 /* Expand the operands. */
6142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6143
a372f7ca 6144 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 6145}
6146
6147/* Given an integer representing an ``enum memmodel'', verify its
6148 correctness and return the memory model enum. */
6149
6150static enum memmodel
6151get_memmodel (tree exp)
6152{
6153 rtx op;
7f738025 6154 unsigned HOST_WIDE_INT val;
be1e7283 6155 location_t loc
2cb724f9 6156 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 6157
6158 /* If the parameter is not a constant, it's a run time value so we'll just
6159 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6160 if (TREE_CODE (exp) != INTEGER_CST)
6161 return MEMMODEL_SEQ_CST;
6162
6163 op = expand_normal (exp);
7f738025 6164
6165 val = INTVAL (op);
6166 if (targetm.memmodel_check)
6167 val = targetm.memmodel_check (val);
6168 else if (val & ~MEMMODEL_MASK)
6169 {
2cb724f9 6170 warning_at (loc, OPT_Winvalid_memory_model,
6171 "unknown architecture specifier in memory model to builtin");
7f738025 6172 return MEMMODEL_SEQ_CST;
6173 }
6174
a372f7ca 6175 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6176 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 6177 {
2cb724f9 6178 warning_at (loc, OPT_Winvalid_memory_model,
6179 "invalid memory model argument to builtin");
1cd6e20d 6180 return MEMMODEL_SEQ_CST;
6181 }
7f738025 6182
3070f133 6183 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6184 be conservative and promote consume to acquire. */
6185 if (val == MEMMODEL_CONSUME)
6186 val = MEMMODEL_ACQUIRE;
6187
7f738025 6188 return (enum memmodel) val;
1cd6e20d 6189}
6190
6191/* Expand the __atomic_exchange intrinsic:
6192 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6193 EXP is the CALL_EXPR.
6194 TARGET is an optional place for us to store the results. */
6195
6196static rtx
3754d046 6197expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 6198{
6199 rtx val, mem;
6200 enum memmodel model;
6201
6202 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 6203
6204 if (!flag_inline_atomics)
6205 return NULL_RTX;
6206
6207 /* Expand the operands. */
6208 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6209 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6210
7821cde1 6211 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 6212}
6213
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* C11 requires the failure ordering to be no stronger than success;
     degrade to seq-cst (with a warning) when the user violates this.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  /* The failure ordering may not include a release component.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer argument; dereference it as a MEM in MODE.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6295
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* Library signature: (object, expect-ptr, desired, success, failure).  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* The internal fn passes EXPECTED by value; the library call wants a
     pointer, so spill the value to a stack slot and pass its address.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The internal fn's result is a complex pair: (success flag,
	 old value read back from the stack slot).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6345
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in the low byte (bit 8 carries the
     weak flag, extracted below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Same memory-model validation as the builtin expander.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  /* Fall back to the out-of-line __atomic_compare_exchange_N call when
     inlining atomics is disabled.  */
  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  /* Also fall back to the library call if no suitable instruction
     sequence exists for this mode.  */
  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* The result is a complex pair: (success flag, old value).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6415
1cd6e20d 6416/* Expand the __atomic_load intrinsic:
6417 TYPE __atomic_load (TYPE *object, enum memmodel)
6418 EXP is the CALL_EXPR.
6419 TARGET is an optional place for us to store the results. */
6420
6421static rtx
3754d046 6422expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 6423{
6424 rtx mem;
6425 enum memmodel model;
6426
6427 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 6428 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 6429 {
be1e7283 6430 location_t loc
2cb724f9 6431 = expansion_point_location_if_in_system_header (input_location);
6432 warning_at (loc, OPT_Winvalid_memory_model,
6433 "invalid memory model for %<__atomic_load%>");
086f4e33 6434 model = MEMMODEL_SEQ_CST;
1cd6e20d 6435 }
6436
6437 if (!flag_inline_atomics)
6438 return NULL_RTX;
6439
6440 /* Expand the operand. */
6441 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6442
6443 return expand_atomic_load (target, mem, model);
6444}
6445
6446
6447/* Expand the __atomic_store intrinsic:
6448 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6449 EXP is the CALL_EXPR.
6450 TARGET is an optional place for us to store the results. */
6451
6452static rtx
3754d046 6453expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 6454{
6455 rtx mem, val;
6456 enum memmodel model;
6457
6458 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 6459 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6460 || is_mm_release (model)))
1cd6e20d 6461 {
be1e7283 6462 location_t loc
2cb724f9 6463 = expansion_point_location_if_in_system_header (input_location);
6464 warning_at (loc, OPT_Winvalid_memory_model,
6465 "invalid memory model for %<__atomic_store%>");
086f4e33 6466 model = MEMMODEL_SEQ_CST;
1cd6e20d 6467 }
6468
6469 if (!flag_inline_atomics)
6470 return NULL_RTX;
6471
6472 /* Expand the operands. */
6473 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6474 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6475
8808bf16 6476 return expand_atomic_store (mem, val, model, false);
1cd6e20d 6477}
6478
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect the
     CALL_EXPR's function to EXT_CALL's decl; restored below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: reconstruct the post-op value as ~(ret & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6553
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* The memory model argument is optional; default is sync seq-cst.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal fn to the rtx op and the direct optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* Result unused: a plain atomic fetch-op on the mask suffices.  */
  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the dedicated bit-test-and-* pattern first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: do the fetch-op on the shifted mask, then extract the
     tested bit from the returned old value.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* FLAG set: normalize the tested bit down to bit 0.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6637
10b744a3 6638/* Expand an atomic clear operation.
6639 void _atomic_clear (BOOL *obj, enum memmodel)
6640 EXP is the call expression. */
6641
6642static rtx
6643expand_builtin_atomic_clear (tree exp)
6644{
3754d046 6645 machine_mode mode;
10b744a3 6646 rtx mem, ret;
6647 enum memmodel model;
6648
517be012 6649 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6650 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6651 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6652
a372f7ca 6653 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 6654 {
be1e7283 6655 location_t loc
2cb724f9 6656 = expansion_point_location_if_in_system_header (input_location);
6657 warning_at (loc, OPT_Winvalid_memory_model,
6658 "invalid memory model for %<__atomic_store%>");
086f4e33 6659 model = MEMMODEL_SEQ_CST;
10b744a3 6660 }
6661
6662 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6663 Failing that, a store is issued by __atomic_store. The only way this can
6664 fail is if the bool type is larger than a word size. Unlikely, but
6665 handle it anyway for completeness. Assume a single threaded model since
6666 there is no atomic support in this case, and no barriers are required. */
6667 ret = expand_atomic_store (mem, const0_rtx, model, true);
6668 if (!ret)
6669 emit_move_insn (mem, const0_rtx);
6670 return const0_rtx;
6671}
6672
6673/* Expand an atomic test_and_set operation.
6674 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6675 EXP is the call expression. */
6676
6677static rtx
7821cde1 6678expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6679{
7821cde1 6680 rtx mem;
10b744a3 6681 enum memmodel model;
3754d046 6682 machine_mode mode;
10b744a3 6683
517be012 6684 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6685 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6686 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6687
7821cde1 6688 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6689}
6690
6691
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* The size must be a compile-time constant to answer at all.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      /* least_bit_hwi extracts the lowest set bit, i.e. the largest
	 power-of-two alignment the "pointer" value guarantees.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6762
6763/* Return true if the parameters to call EXP represent an object which will
6764 always generate lock free instructions. The first argument represents the
6765 size of the object, and the second parameter is a pointer to the object
6766 itself. If NULL is passed for the object, then the result is based on
6767 typical alignment for an object of the specified size. Otherwise return
6768 false. */
6769
6770static rtx
6771expand_builtin_atomic_always_lock_free (tree exp)
6772{
6773 tree size;
6774 tree arg0 = CALL_EXPR_ARG (exp, 0);
6775 tree arg1 = CALL_EXPR_ARG (exp, 1);
6776
6777 if (TREE_CODE (arg0) != INTEGER_CST)
6778 {
6779 error ("non-constant argument 1 to __atomic_always_lock_free");
6780 return const0_rtx;
6781 }
6782
6783 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6784 if (size == boolean_true_node)
1cd6e20d 6785 return const1_rtx;
6786 return const0_rtx;
6787}
6788
6789/* Return a one or zero if it can be determined that object ARG1 of size ARG
6790 is lock free on this architecture. */
6791
6792static tree
6793fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6794{
6795 if (!flag_inline_atomics)
6796 return NULL_TREE;
6797
6798 /* If it isn't always lock free, don't generate a result. */
06308d2a 6799 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6800 return boolean_true_node;
1cd6e20d 6801
6802 return NULL_TREE;
6803}
6804
6805/* Return true if the parameters to call EXP represent an object which will
6806 always generate lock free instructions. The first argument represents the
6807 size of the object, and the second parameter is a pointer to the object
6808 itself. If NULL is passed for the object, then the result is based on
6809 typical alignment for an object of the specified size. Otherwise return
6810 NULL*/
6811
6812static rtx
6813expand_builtin_atomic_is_lock_free (tree exp)
6814{
6815 tree size;
6816 tree arg0 = CALL_EXPR_ARG (exp, 0);
6817 tree arg1 = CALL_EXPR_ARG (exp, 1);
6818
6819 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6820 {
6821 error ("non-integer argument 1 to __atomic_is_lock_free");
6822 return NULL_RTX;
6823 }
6824
6825 if (!flag_inline_atomics)
6826 return NULL_RTX;
6827
6828 /* If the value is known at compile time, return the RTX for it. */
6829 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6830 if (size == boolean_true_node)
1cd6e20d 6831 return const1_rtx;
6832
6833 return NULL_RTX;
6834}
6835
1cd6e20d 6836/* Expand the __atomic_thread_fence intrinsic:
6837 void __atomic_thread_fence (enum memmodel)
6838 EXP is the CALL_EXPR. */
6839
6840static void
6841expand_builtin_atomic_thread_fence (tree exp)
6842{
fe54c06b 6843 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6844 expand_mem_thread_fence (model);
1cd6e20d 6845}
6846
6847/* Expand the __atomic_signal_fence intrinsic:
6848 void __atomic_signal_fence (enum memmodel)
6849 EXP is the CALL_EXPR. */
6850
6851static void
6852expand_builtin_atomic_signal_fence (tree exp)
6853{
fe54c06b 6854 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6855 expand_mem_signal_fence (model);
b6a5fc45 6856}
6857
6858/* Expand the __sync_synchronize intrinsic. */
6859
6860static void
2797f13a 6861expand_builtin_sync_synchronize (void)
b6a5fc45 6862{
a372f7ca 6863 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 6864}
6865
badaa04c 6866static rtx
6867expand_builtin_thread_pointer (tree exp, rtx target)
6868{
6869 enum insn_code icode;
6870 if (!validate_arglist (exp, VOID_TYPE))
6871 return const0_rtx;
6872 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6873 if (icode != CODE_FOR_nothing)
6874 {
6875 struct expand_operand op;
3ed779c3 6876 /* If the target is not sutitable then create a new target. */
6877 if (target == NULL_RTX
6878 || !REG_P (target)
6879 || GET_MODE (target) != Pmode)
badaa04c 6880 target = gen_reg_rtx (Pmode);
6881 create_output_operand (&op, target, Pmode);
6882 expand_insn (icode, 1, &op);
6883 return target;
6884 }
6885 error ("__builtin_thread_pointer is not supported on this target");
6886 return const0_rtx;
6887}
6888
6889static void
6890expand_builtin_set_thread_pointer (tree exp)
6891{
6892 enum insn_code icode;
6893 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6894 return;
6895 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6896 if (icode != CODE_FOR_nothing)
6897 {
6898 struct expand_operand op;
6899 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6900 Pmode, EXPAND_NORMAL);
6f343c10 6901 create_input_operand (&op, val, Pmode);
badaa04c 6902 expand_insn (icode, 1, &op);
6903 return;
6904 }
6905 error ("__builtin_set_thread_pointer is not supported on this target");
6906}
6907
53800dbe 6908\f
0e80b01d 6909/* Emit code to restore the current value of stack. */
6910
6911static void
6912expand_stack_restore (tree var)
6913{
1e0c0b35 6914 rtx_insn *prev;
6915 rtx sa = expand_normal (var);
0e80b01d 6916
6917 sa = convert_memory_address (Pmode, sa);
6918
6919 prev = get_last_insn ();
6920 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6921
6922 record_new_stack_level ();
6923
0e80b01d 6924 fixup_args_size_notes (prev, get_last_insn (), 0);
6925}
6926
0e80b01d 6927/* Emit code to save the current value of stack. */
6928
6929static rtx
6930expand_stack_save (void)
6931{
6932 rtx ret = NULL_RTX;
6933
0e80b01d 6934 emit_stack_save (SAVE_BLOCK, &ret);
6935 return ret;
6936}
6937
a7babc1e 6938/* Emit code to get the openacc gang, worker or vector id or size. */
6939
6940static rtx
6941expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6942{
6943 const char *name;
6944 rtx fallback_retval;
6945 rtx_insn *(*gen_fn) (rtx, rtx);
6946 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6947 {
6948 case BUILT_IN_GOACC_PARLEVEL_ID:
6949 name = "__builtin_goacc_parlevel_id";
6950 fallback_retval = const0_rtx;
6951 gen_fn = targetm.gen_oacc_dim_pos;
6952 break;
6953 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6954 name = "__builtin_goacc_parlevel_size";
6955 fallback_retval = const1_rtx;
6956 gen_fn = targetm.gen_oacc_dim_size;
6957 break;
6958 default:
6959 gcc_unreachable ();
6960 }
6961
6962 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6963 {
6964 error ("%qs only supported in OpenACC code", name);
6965 return const0_rtx;
6966 }
6967
6968 tree arg = CALL_EXPR_ARG (exp, 0);
6969 if (TREE_CODE (arg) != INTEGER_CST)
6970 {
6971 error ("non-constant argument 0 to %qs", name);
6972 return const0_rtx;
6973 }
6974
6975 int dim = TREE_INT_CST_LOW (arg);
6976 switch (dim)
6977 {
6978 case GOMP_DIM_GANG:
6979 case GOMP_DIM_WORKER:
6980 case GOMP_DIM_VECTOR:
6981 break;
6982 default:
6983 error ("illegal argument 0 to %qs", name);
6984 return const0_rtx;
6985 }
6986
6987 if (ignore)
6988 return target;
6989
2b895374 6990 if (target == NULL_RTX)
6991 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6992
a7babc1e 6993 if (!targetm.have_oacc_dim_size ())
6994 {
6995 emit_move_insn (target, fallback_retval);
6996 return target;
6997 }
6998
6999 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7000 emit_insn (gen_fn (reg, GEN_INT (dim)));
7001 if (reg != target)
7002 emit_move_insn (target, reg);
7003
7004 return target;
7005}
ca4c3545 7006
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string
   (1 if it is the first call argument, 2 if the second);
   MODE is the mode of the comparison result (the type of the call).

   The generated code corresponds to
   (assume const_str_n is 2, i.e., arg2 is a constant string):

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load the current byte of the variable string ...  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      /* ... and materialize the corresponding constant byte.  */
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* Order the operands so the subtraction matches the original
	 argument order of the call.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to MODE (the chars are compared as
	 unsigned char) and subtract into RESULT.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* Branch out early on a mismatch; no branch is needed after the
	 final byte, since RESULT already holds the answer.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
7076
/* Inline expansion of a call EXP to strcmp, strncmp or memcmp, with the
   result going to TARGET if that's convenient.
   Return NULL_RTX if the call was not inlined.  */
static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  /* Both strncmp and memcmp carry an explicit length as argument 3.  */
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has the same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither of the strings is a constant string, the call does not
     qualify for inlining.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp/memcmp, if the length is not a constant, do not inline.  */
  if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
    return NULL_RTX;

  /* Choose which argument supplies the constant string; when both are
     constant, prefer the shorter one so fewer byte comparisons are
     emitted.  NOTE(review): a zero LEN is treated the same as a
     non-constant string here — confirm c_getstr's behavior for empty
     string constants.  */
  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  /* An explicit length argument may further bound the comparison.  */
  if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     do nothing.  */
  if (length > (unsigned HOST_WIDE_INT)
	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? src_str1 : src_str2, length,
			    const_str_n, mode);
}
7151
123081ef 7152/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7153 represents the size of the first argument to that call, or VOIDmode
7154 if the argument is a pointer. IGNORE will be true if the result
7155 isn't used. */
7156static rtx
7157expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7158 bool ignore)
7159{
7160 rtx val, failsafe;
7161 unsigned nargs = call_expr_nargs (exp);
7162
7163 tree arg0 = CALL_EXPR_ARG (exp, 0);
7164
7165 if (mode == VOIDmode)
7166 {
7167 mode = TYPE_MODE (TREE_TYPE (arg0));
7168 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7169 }
7170
7171 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7172
7173 /* An optional second argument can be used as a failsafe value on
7174 some machines. If it isn't present, then the failsafe value is
7175 assumed to be 0. */
7176 if (nargs > 1)
7177 {
7178 tree arg1 = CALL_EXPR_ARG (exp, 1);
7179 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7180 }
7181 else
7182 failsafe = const0_rtx;
7183
7184 /* If the result isn't used, the behavior is undefined. It would be
7185 nice to emit a warning here, but path splitting means this might
7186 happen with legitimate code. So simply drop the builtin
7187 expansion in that case; we've handled any side-effects above. */
7188 if (ignore)
7189 return const0_rtx;
7190
7191 /* If we don't have a suitable target, create one to hold the result. */
7192 if (target == NULL || GET_MODE (target) != mode)
7193 target = gen_reg_rtx (mode);
7194
7195 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7196 val = convert_modes (mode, VOIDmode, val, false);
7197
7198 return targetm.speculation_safe_value (mode, target, val, failsafe);
7199}
7200
53800dbe 7201/* Expand an expression EXP that calls a built-in function,
7202 with result going to TARGET if that's convenient
7203 (and in mode MODE if that's convenient).
7204 SUBTARGET may be used as the target for computing one of EXP's operands.
7205 IGNORE is nonzero if the value is to be ignored. */
7206
7207rtx
3754d046 7208expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 7209 int ignore)
53800dbe 7210{
c6e6ecb1 7211 tree fndecl = get_callee_fndecl (exp);
53800dbe 7212 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 7213 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 7214 int flags;
53800dbe 7215
4e2f4ed5 7216 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7217 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7218
f9acf11a 7219 /* When ASan is enabled, we don't want to expand some memory/string
7220 builtins and rely on libsanitizer's hooks. This allows us to avoid
7221 redundant checks and be sure, that possible overflow will be detected
7222 by ASan. */
7223
7224 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7225 return expand_call (exp, target, ignore);
7226
53800dbe 7227 /* When not optimizing, generate calls to library functions for a certain
7228 set of builtins. */
cd9ff771 7229 if (!optimize
b6a5fc45 7230 && !called_as_built_in (fndecl)
73037a1e 7231 && fcode != BUILT_IN_FORK
7232 && fcode != BUILT_IN_EXECL
7233 && fcode != BUILT_IN_EXECV
7234 && fcode != BUILT_IN_EXECLP
7235 && fcode != BUILT_IN_EXECLE
7236 && fcode != BUILT_IN_EXECVP
7237 && fcode != BUILT_IN_EXECVE
2b34677f 7238 && !ALLOCA_FUNCTION_CODE_P (fcode)
1e42d5c6 7239 && fcode != BUILT_IN_FREE)
cd9ff771 7240 return expand_call (exp, target, ignore);
53800dbe 7241
8d6d7930 7242 /* The built-in function expanders test for target == const0_rtx
7243 to determine whether the function's result will be ignored. */
7244 if (ignore)
7245 target = const0_rtx;
7246
7247 /* If the result of a pure or const built-in function is ignored, and
7248 none of its arguments are volatile, we can avoid expanding the
7249 built-in call and just evaluate the arguments for side-effects. */
7250 if (target == const0_rtx
67fa4078 7251 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7252 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 7253 {
7254 bool volatilep = false;
7255 tree arg;
c2f47e15 7256 call_expr_arg_iterator iter;
8d6d7930 7257
c2f47e15 7258 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7259 if (TREE_THIS_VOLATILE (arg))
8d6d7930 7260 {
7261 volatilep = true;
7262 break;
7263 }
7264
7265 if (! volatilep)
7266 {
c2f47e15 7267 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7268 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 7269 return const0_rtx;
7270 }
7271 }
7272
53800dbe 7273 switch (fcode)
7274 {
4f35b1fc 7275 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 7276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 7277 case BUILT_IN_FABSD32:
7278 case BUILT_IN_FABSD64:
7279 case BUILT_IN_FABSD128:
c2f47e15 7280 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 7281 if (target)
a0c938f0 7282 return target;
78a74442 7283 break;
7284
4f35b1fc 7285 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 7286 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 7287 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 7288 if (target)
7289 return target;
7290 break;
7291
7d3f6cc7 7292 /* Just do a normal library call if we were unable to fold
7293 the values. */
4f35b1fc 7294 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 7295 break;
53800dbe 7296
7e0713b1 7297 CASE_FLT_FN (BUILT_IN_FMA):
8c32188e 7298 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7e0713b1 7299 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7300 if (target)
7301 return target;
7302 break;
7303
a67a90e5 7304 CASE_FLT_FN (BUILT_IN_ILOGB):
7305 if (! flag_unsafe_math_optimizations)
7306 break;
12f08300 7307 gcc_fallthrough ();
7308 CASE_FLT_FN (BUILT_IN_ISINF):
7309 CASE_FLT_FN (BUILT_IN_FINITE):
7310 case BUILT_IN_ISFINITE:
7311 case BUILT_IN_ISNORMAL:
f97eea22 7312 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 7313 if (target)
7314 return target;
7315 break;
7316
80ff6494 7317 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 7318 CASE_FLT_FN (BUILT_IN_LCEIL):
7319 CASE_FLT_FN (BUILT_IN_LLCEIL):
7320 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 7321 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 7322 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 7323 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 7324 if (target)
7325 return target;
7326 break;
7327
80ff6494 7328 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 7329 CASE_FLT_FN (BUILT_IN_LRINT):
7330 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 7331 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 7332 CASE_FLT_FN (BUILT_IN_LROUND):
7333 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 7334 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 7335 if (target)
7336 return target;
7337 break;
7338
4f35b1fc 7339 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 7340 target = expand_builtin_powi (exp, target);
757c219d 7341 if (target)
7342 return target;
7343 break;
7344
d735c391 7345 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 7346 target = expand_builtin_cexpi (exp, target);
d735c391 7347 gcc_assert (target);
7348 return target;
7349
4f35b1fc 7350 CASE_FLT_FN (BUILT_IN_SIN):
7351 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 7352 if (! flag_unsafe_math_optimizations)
7353 break;
7354 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7355 if (target)
7356 return target;
7357 break;
7358
c3147c1a 7359 CASE_FLT_FN (BUILT_IN_SINCOS):
7360 if (! flag_unsafe_math_optimizations)
7361 break;
7362 target = expand_builtin_sincos (exp);
7363 if (target)
7364 return target;
7365 break;
7366
53800dbe 7367 case BUILT_IN_APPLY_ARGS:
7368 return expand_builtin_apply_args ();
7369
7370 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7371 FUNCTION with a copy of the parameters described by
7372 ARGUMENTS, and ARGSIZE. It returns a block of memory
7373 allocated on the stack into which is stored all the registers
7374 that might possibly be used for returning the result of a
7375 function. ARGUMENTS is the value returned by
7376 __builtin_apply_args. ARGSIZE is the number of bytes of
7377 arguments that must be copied. ??? How should this value be
7378 computed? We'll also need a safe worst case value for varargs
7379 functions. */
7380 case BUILT_IN_APPLY:
c2f47e15 7381 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 7382 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 7383 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 7384 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7385 return const0_rtx;
7386 else
7387 {
53800dbe 7388 rtx ops[3];
7389
c2f47e15 7390 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7391 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7392 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 7393
7394 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7395 }
7396
7397 /* __builtin_return (RESULT) causes the function to return the
7398 value described by RESULT. RESULT is address of the block of
7399 memory returned by __builtin_apply. */
7400 case BUILT_IN_RETURN:
c2f47e15 7401 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7402 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 7403 return const0_rtx;
7404
7405 case BUILT_IN_SAVEREGS:
a66c9326 7406 return expand_builtin_saveregs ();
53800dbe 7407
48dc2227 7408 case BUILT_IN_VA_ARG_PACK:
7409 /* All valid uses of __builtin_va_arg_pack () are removed during
7410 inlining. */
b8c23db3 7411 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 7412 return const0_rtx;
7413
4e1d7ea4 7414 case BUILT_IN_VA_ARG_PACK_LEN:
7415 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7416 inlining. */
b8c23db3 7417 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 7418 return const0_rtx;
7419
53800dbe 7420 /* Return the address of the first anonymous stack arg. */
7421 case BUILT_IN_NEXT_ARG:
c2f47e15 7422 if (fold_builtin_next_arg (exp, false))
a0c938f0 7423 return const0_rtx;
79012a9d 7424 return expand_builtin_next_arg ();
53800dbe 7425
ac8fb6db 7426 case BUILT_IN_CLEAR_CACHE:
7427 target = expand_builtin___clear_cache (exp);
7428 if (target)
7429 return target;
7430 break;
7431
53800dbe 7432 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 7433 return expand_builtin_classify_type (exp);
53800dbe 7434
7435 case BUILT_IN_CONSTANT_P:
4ee9c684 7436 return const0_rtx;
53800dbe 7437
7438 case BUILT_IN_FRAME_ADDRESS:
7439 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 7440 return expand_builtin_frame_address (fndecl, exp);
53800dbe 7441
7442 /* Returns the address of the area where the structure is returned.
7443 0 otherwise. */
7444 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 7445 if (call_expr_nargs (exp) != 0
9342ee68 7446 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 7447 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 7448 return const0_rtx;
53800dbe 7449 else
9342ee68 7450 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 7451
2b34677f 7452 CASE_BUILT_IN_ALLOCA:
2b29cc6a 7453 target = expand_builtin_alloca (exp);
53800dbe 7454 if (target)
7455 return target;
7456 break;
7457
d08919a7 7458 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7459 return expand_asan_emit_allocas_unpoison (exp);
7460
4ee9c684 7461 case BUILT_IN_STACK_SAVE:
7462 return expand_stack_save ();
7463
7464 case BUILT_IN_STACK_RESTORE:
c2f47e15 7465 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 7466 return const0_rtx;
7467
74bdbe96 7468 case BUILT_IN_BSWAP16:
42791117 7469 case BUILT_IN_BSWAP32:
7470 case BUILT_IN_BSWAP64:
74bdbe96 7471 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 7472 if (target)
7473 return target;
7474 break;
7475
4f35b1fc 7476 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 7477 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7478 subtarget, ffs_optab);
6a08d0ab 7479 if (target)
7480 return target;
7481 break;
7482
4f35b1fc 7483 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 7484 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7485 subtarget, clz_optab);
6a08d0ab 7486 if (target)
7487 return target;
7488 break;
7489
4f35b1fc 7490 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 7491 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7492 subtarget, ctz_optab);
6a08d0ab 7493 if (target)
7494 return target;
7495 break;
7496
d8492bd3 7497 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 7498 target = expand_builtin_unop (target_mode, exp, target,
7499 subtarget, clrsb_optab);
7500 if (target)
7501 return target;
7502 break;
7503
4f35b1fc 7504 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 7505 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7506 subtarget, popcount_optab);
6a08d0ab 7507 if (target)
7508 return target;
7509 break;
7510
4f35b1fc 7511 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 7512 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7513 subtarget, parity_optab);
53800dbe 7514 if (target)
7515 return target;
7516 break;
7517
7518 case BUILT_IN_STRLEN:
c2f47e15 7519 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 7520 if (target)
7521 return target;
7522 break;
7523
864bd5de 7524 case BUILT_IN_STRNLEN:
7525 target = expand_builtin_strnlen (exp, target, target_mode);
7526 if (target)
7527 return target;
7528 break;
7529
5aef8938 7530 case BUILT_IN_STRCAT:
7531 target = expand_builtin_strcat (exp, target);
7532 if (target)
7533 return target;
7534 break;
7535
53800dbe 7536 case BUILT_IN_STRCPY:
a65c4d64 7537 target = expand_builtin_strcpy (exp, target);
53800dbe 7538 if (target)
7539 return target;
7540 break;
bf8e3599 7541
5aef8938 7542 case BUILT_IN_STRNCAT:
7543 target = expand_builtin_strncat (exp, target);
7544 if (target)
7545 return target;
7546 break;
7547
ed09096d 7548 case BUILT_IN_STRNCPY:
a65c4d64 7549 target = expand_builtin_strncpy (exp, target);
ed09096d 7550 if (target)
7551 return target;
7552 break;
bf8e3599 7553
3b824fa6 7554 case BUILT_IN_STPCPY:
dc369150 7555 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 7556 if (target)
7557 return target;
7558 break;
7559
4d317237 7560 case BUILT_IN_STPNCPY:
7561 target = expand_builtin_stpncpy (exp, target);
7562 if (target)
7563 return target;
7564 break;
7565
8d6c6ef5 7566 case BUILT_IN_MEMCHR:
7567 target = expand_builtin_memchr (exp, target);
7568 if (target)
7569 return target;
7570 break;
7571
53800dbe 7572 case BUILT_IN_MEMCPY:
a65c4d64 7573 target = expand_builtin_memcpy (exp, target);
3b824fa6 7574 if (target)
7575 return target;
7576 break;
7577
4d317237 7578 case BUILT_IN_MEMMOVE:
7579 target = expand_builtin_memmove (exp, target);
7580 if (target)
7581 return target;
7582 break;
7583
3b824fa6 7584 case BUILT_IN_MEMPCPY:
d0fbba1a 7585 target = expand_builtin_mempcpy (exp, target);
53800dbe 7586 if (target)
7587 return target;
7588 break;
7589
7590 case BUILT_IN_MEMSET:
c2f47e15 7591 target = expand_builtin_memset (exp, target, mode);
53800dbe 7592 if (target)
7593 return target;
7594 break;
7595
ffc83088 7596 case BUILT_IN_BZERO:
0b25db21 7597 target = expand_builtin_bzero (exp);
ffc83088 7598 if (target)
7599 return target;
7600 break;
7601
b3e6ae76 7602 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
72dbc21d 7603 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7604 when changing it to a strcmp call. */
7605 case BUILT_IN_STRCMP_EQ:
7606 target = expand_builtin_memcmp (exp, target, true);
7607 if (target)
7608 return target;
7609
7610 /* Change this call back to a BUILT_IN_STRCMP. */
b3e6ae76 7611 TREE_OPERAND (exp, 1)
72dbc21d 7612 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7613
7614 /* Delete the last parameter. */
7615 unsigned int i;
7616 vec<tree, va_gc> *arg_vec;
7617 vec_alloc (arg_vec, 2);
7618 for (i = 0; i < 2; i++)
7619 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7620 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7621 /* FALLTHROUGH */
7622
53800dbe 7623 case BUILT_IN_STRCMP:
a65c4d64 7624 target = expand_builtin_strcmp (exp, target);
53800dbe 7625 if (target)
7626 return target;
7627 break;
7628
72dbc21d 7629 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7630 back to a BUILT_IN_STRNCMP. */
7631 case BUILT_IN_STRNCMP_EQ:
7632 target = expand_builtin_memcmp (exp, target, true);
7633 if (target)
7634 return target;
7635
7636 /* Change it back to a BUILT_IN_STRNCMP. */
b3e6ae76 7637 TREE_OPERAND (exp, 1)
72dbc21d 7638 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7639 /* FALLTHROUGH */
7640
ed09096d 7641 case BUILT_IN_STRNCMP:
7642 target = expand_builtin_strncmp (exp, target, mode);
7643 if (target)
7644 return target;
7645 break;
7646
071f1696 7647 case BUILT_IN_BCMP:
53800dbe 7648 case BUILT_IN_MEMCMP:
3e346f54 7649 case BUILT_IN_MEMCMP_EQ:
7650 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 7651 if (target)
7652 return target;
3e346f54 7653 if (fcode == BUILT_IN_MEMCMP_EQ)
7654 {
7655 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7656 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7657 }
53800dbe 7658 break;
53800dbe 7659
7660 case BUILT_IN_SETJMP:
12f08300 7661 /* This should have been lowered to the builtins below. */
2c8a1497 7662 gcc_unreachable ();
7663
7664 case BUILT_IN_SETJMP_SETUP:
7665 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7666 and the receiver label. */
c2f47e15 7667 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 7668 {
c2f47e15 7669 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 7670 VOIDmode, EXPAND_NORMAL);
c2f47e15 7671 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 7672 rtx_insn *label_r = label_rtx (label);
2c8a1497 7673
7674 /* This is copied from the handling of non-local gotos. */
7675 expand_builtin_setjmp_setup (buf_addr, label_r);
7676 nonlocal_goto_handler_labels
a4de1c23 7677 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 7678 nonlocal_goto_handler_labels);
7679 /* ??? Do not let expand_label treat us as such since we would
7680 not want to be both on the list of non-local labels and on
7681 the list of forced labels. */
7682 FORCED_LABEL (label) = 0;
7683 return const0_rtx;
7684 }
7685 break;
7686
2c8a1497 7687 case BUILT_IN_SETJMP_RECEIVER:
7688 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 7689 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 7690 {
c2f47e15 7691 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 7692 rtx_insn *label_r = label_rtx (label);
2c8a1497 7693
7694 expand_builtin_setjmp_receiver (label_r);
7695 return const0_rtx;
7696 }
6b7f6858 7697 break;
53800dbe 7698
7699 /* __builtin_longjmp is passed a pointer to an array of five words.
7700 It's similar to the C library longjmp function but works with
7701 __builtin_setjmp above. */
7702 case BUILT_IN_LONGJMP:
c2f47e15 7703 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7704 {
c2f47e15 7705 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 7706 VOIDmode, EXPAND_NORMAL);
c2f47e15 7707 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 7708
7709 if (value != const1_rtx)
7710 {
1e5fcbe2 7711 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 7712 return const0_rtx;
7713 }
7714
7715 expand_builtin_longjmp (buf_addr, value);
7716 return const0_rtx;
7717 }
2c8a1497 7718 break;
53800dbe 7719
4ee9c684 7720 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 7721 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 7722 if (target)
7723 return target;
7724 break;
7725
843d08a9 7726 /* This updates the setjmp buffer that is its argument with the value
7727 of the current stack pointer. */
7728 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 7729 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 7730 {
7731 rtx buf_addr
c2f47e15 7732 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 7733
7734 expand_builtin_update_setjmp_buf (buf_addr);
7735 return const0_rtx;
7736 }
7737 break;
7738
53800dbe 7739 case BUILT_IN_TRAP:
a0ef1725 7740 expand_builtin_trap ();
53800dbe 7741 return const0_rtx;
7742
d2b48f0c 7743 case BUILT_IN_UNREACHABLE:
7744 expand_builtin_unreachable ();
7745 return const0_rtx;
7746
4f35b1fc 7747 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 7748 case BUILT_IN_SIGNBITD32:
7749 case BUILT_IN_SIGNBITD64:
7750 case BUILT_IN_SIGNBITD128:
27f261ef 7751 target = expand_builtin_signbit (exp, target);
7752 if (target)
7753 return target;
7754 break;
7755
53800dbe 7756 /* Various hooks for the DWARF 2 __throw routine. */
7757 case BUILT_IN_UNWIND_INIT:
7758 expand_builtin_unwind_init ();
7759 return const0_rtx;
7760 case BUILT_IN_DWARF_CFA:
7761 return virtual_cfa_rtx;
7762#ifdef DWARF2_UNWIND_INFO
f8f023a5 7763 case BUILT_IN_DWARF_SP_COLUMN:
7764 return expand_builtin_dwarf_sp_column ();
695e919b 7765 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 7766 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 7767 return const0_rtx;
53800dbe 7768#endif
7769 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 7770 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7771 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 7772 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7773 case BUILT_IN_EH_RETURN:
c2f47e15 7774 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7775 CALL_EXPR_ARG (exp, 1));
53800dbe 7776 return const0_rtx;
df4b504c 7777 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 7778 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7779 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7780 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7781 case BUILT_IN_EH_POINTER:
7782 return expand_builtin_eh_pointer (exp);
7783 case BUILT_IN_EH_FILTER:
7784 return expand_builtin_eh_filter (exp);
7785 case BUILT_IN_EH_COPY_VALUES:
7786 return expand_builtin_eh_copy_values (exp);
26093bf4 7787
7ccc713a 7788 case BUILT_IN_VA_START:
c2f47e15 7789 return expand_builtin_va_start (exp);
a66c9326 7790 case BUILT_IN_VA_END:
c2f47e15 7791 return expand_builtin_va_end (exp);
a66c9326 7792 case BUILT_IN_VA_COPY:
c2f47e15 7793 return expand_builtin_va_copy (exp);
89cfe6e5 7794 case BUILT_IN_EXPECT:
c2f47e15 7795 return expand_builtin_expect (exp, target);
01107f42 7796 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7797 return expand_builtin_expect_with_probability (exp, target);
fca0886c 7798 case BUILT_IN_ASSUME_ALIGNED:
7799 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7800 case BUILT_IN_PREFETCH:
c2f47e15 7801 expand_builtin_prefetch (exp);
5e3608d8 7802 return const0_rtx;
7803
4ee9c684 7804 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7805 return expand_builtin_init_trampoline (exp, true);
7806 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7807 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7808 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7809 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7810
a27e3913 7811 case BUILT_IN_INIT_DESCRIPTOR:
7812 return expand_builtin_init_descriptor (exp);
7813 case BUILT_IN_ADJUST_DESCRIPTOR:
7814 return expand_builtin_adjust_descriptor (exp);
7815
73673831 7816 case BUILT_IN_FORK:
7817 case BUILT_IN_EXECL:
7818 case BUILT_IN_EXECV:
7819 case BUILT_IN_EXECLP:
7820 case BUILT_IN_EXECLE:
7821 case BUILT_IN_EXECVP:
7822 case BUILT_IN_EXECVE:
c2f47e15 7823 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7824 if (target)
7825 return target;
7826 break;
53800dbe 7827
2797f13a 7828 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7829 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7830 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7831 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7832 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7833 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7834 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7835 if (target)
7836 return target;
7837 break;
7838
2797f13a 7839 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7840 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7841 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7842 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7843 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7844 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7845 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7846 if (target)
7847 return target;
7848 break;
7849
2797f13a 7850 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7851 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7852 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7853 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7854 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7855 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7856 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7857 if (target)
7858 return target;
7859 break;
7860
2797f13a 7861 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7862 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7863 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7864 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7865 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7866 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7867 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7868 if (target)
7869 return target;
7870 break;
7871
2797f13a 7872 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7873 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7874 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7875 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7876 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7877 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7878 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7879 if (target)
7880 return target;
7881 break;
7882
2797f13a 7883 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7884 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7885 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7886 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7887 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7888 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7889 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7890 if (target)
7891 return target;
7892 break;
7893
2797f13a 7894 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7895 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7896 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7897 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7898 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7899 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7900 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7901 if (target)
7902 return target;
7903 break;
7904
2797f13a 7905 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7906 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7907 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7908 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7909 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7910 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7911 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7912 if (target)
7913 return target;
7914 break;
7915
2797f13a 7916 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7917 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7918 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7919 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7920 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7921 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7922 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7923 if (target)
7924 return target;
7925 break;
7926
2797f13a 7927 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7928 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7929 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7930 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7931 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7932 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7933 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7934 if (target)
7935 return target;
7936 break;
7937
2797f13a 7938 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7939 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7940 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7941 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7942 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7943 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7944 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7945 if (target)
7946 return target;
7947 break;
7948
2797f13a 7949 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7950 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7951 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7952 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7953 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7954 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7955 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7956 if (target)
7957 return target;
7958 break;
7959
2797f13a 7960 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7961 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7962 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7963 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7964 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7965 if (mode == VOIDmode)
7966 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7967 if (!target || !register_operand (target, mode))
7968 target = gen_reg_rtx (mode);
3e272de8 7969
2797f13a 7970 mode = get_builtin_sync_mode
7971 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7972 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7973 if (target)
7974 return target;
7975 break;
7976
2797f13a 7977 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7978 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7979 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7980 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7981 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7982 mode = get_builtin_sync_mode
7983 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7984 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7985 if (target)
7986 return target;
7987 break;
7988
2797f13a 7989 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7990 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7991 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7992 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7993 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7994 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7995 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7996 if (target)
7997 return target;
7998 break;
7999
2797f13a 8000 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8001 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8002 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8003 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8004 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8005 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8006 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 8007 return const0_rtx;
8008
2797f13a 8009 case BUILT_IN_SYNC_SYNCHRONIZE:
8010 expand_builtin_sync_synchronize ();
b6a5fc45 8011 return const0_rtx;
8012
1cd6e20d 8013 case BUILT_IN_ATOMIC_EXCHANGE_1:
8014 case BUILT_IN_ATOMIC_EXCHANGE_2:
8015 case BUILT_IN_ATOMIC_EXCHANGE_4:
8016 case BUILT_IN_ATOMIC_EXCHANGE_8:
8017 case BUILT_IN_ATOMIC_EXCHANGE_16:
8018 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8019 target = expand_builtin_atomic_exchange (mode, exp, target);
8020 if (target)
8021 return target;
8022 break;
8023
8024 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8025 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8026 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8027 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8028 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 8029 {
8030 unsigned int nargs, z;
f1f41a6c 8031 vec<tree, va_gc> *vec;
2c201ad1 8032
8033 mode =
8034 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8035 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8036 if (target)
8037 return target;
8038
8039 /* If this is turned into an external library call, the weak parameter
8040 must be dropped to match the expected parameter list. */
8041 nargs = call_expr_nargs (exp);
f1f41a6c 8042 vec_alloc (vec, nargs - 1);
2c201ad1 8043 for (z = 0; z < 3; z++)
f1f41a6c 8044 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8045 /* Skip the boolean weak parameter. */
8046 for (z = 4; z < 6; z++)
f1f41a6c 8047 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8048 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8049 break;
8050 }
1cd6e20d 8051
8052 case BUILT_IN_ATOMIC_LOAD_1:
8053 case BUILT_IN_ATOMIC_LOAD_2:
8054 case BUILT_IN_ATOMIC_LOAD_4:
8055 case BUILT_IN_ATOMIC_LOAD_8:
8056 case BUILT_IN_ATOMIC_LOAD_16:
8057 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8058 target = expand_builtin_atomic_load (mode, exp, target);
8059 if (target)
8060 return target;
8061 break;
8062
8063 case BUILT_IN_ATOMIC_STORE_1:
8064 case BUILT_IN_ATOMIC_STORE_2:
8065 case BUILT_IN_ATOMIC_STORE_4:
8066 case BUILT_IN_ATOMIC_STORE_8:
8067 case BUILT_IN_ATOMIC_STORE_16:
8068 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8069 target = expand_builtin_atomic_store (mode, exp);
8070 if (target)
8071 return const0_rtx;
8072 break;
8073
8074 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8075 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8076 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8077 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8078 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8079 {
8080 enum built_in_function lib;
8081 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8082 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8083 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8084 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8085 ignore, lib);
8086 if (target)
8087 return target;
8088 break;
8089 }
8090 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8091 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8092 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8093 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8094 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8095 {
8096 enum built_in_function lib;
8097 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8098 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8099 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8100 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8101 ignore, lib);
8102 if (target)
8103 return target;
8104 break;
8105 }
8106 case BUILT_IN_ATOMIC_AND_FETCH_1:
8107 case BUILT_IN_ATOMIC_AND_FETCH_2:
8108 case BUILT_IN_ATOMIC_AND_FETCH_4:
8109 case BUILT_IN_ATOMIC_AND_FETCH_8:
8110 case BUILT_IN_ATOMIC_AND_FETCH_16:
8111 {
8112 enum built_in_function lib;
8113 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8114 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8115 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8116 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8117 ignore, lib);
8118 if (target)
8119 return target;
8120 break;
8121 }
8122 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8123 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8124 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8125 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8126 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8127 {
8128 enum built_in_function lib;
8129 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8130 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8131 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8132 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8133 ignore, lib);
8134 if (target)
8135 return target;
8136 break;
8137 }
8138 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8139 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8140 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8141 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8142 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8143 {
8144 enum built_in_function lib;
8145 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8146 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8147 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8148 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8149 ignore, lib);
8150 if (target)
8151 return target;
8152 break;
8153 }
8154 case BUILT_IN_ATOMIC_OR_FETCH_1:
8155 case BUILT_IN_ATOMIC_OR_FETCH_2:
8156 case BUILT_IN_ATOMIC_OR_FETCH_4:
8157 case BUILT_IN_ATOMIC_OR_FETCH_8:
8158 case BUILT_IN_ATOMIC_OR_FETCH_16:
8159 {
8160 enum built_in_function lib;
8161 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8162 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8163 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8164 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8165 ignore, lib);
8166 if (target)
8167 return target;
8168 break;
8169 }
8170 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8171 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8172 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8173 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8174 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8175 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8176 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8177 ignore, BUILT_IN_NONE);
8178 if (target)
8179 return target;
8180 break;
8181
8182 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8183 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8184 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8185 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8186 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8187 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8188 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8189 ignore, BUILT_IN_NONE);
8190 if (target)
8191 return target;
8192 break;
8193
8194 case BUILT_IN_ATOMIC_FETCH_AND_1:
8195 case BUILT_IN_ATOMIC_FETCH_AND_2:
8196 case BUILT_IN_ATOMIC_FETCH_AND_4:
8197 case BUILT_IN_ATOMIC_FETCH_AND_8:
8198 case BUILT_IN_ATOMIC_FETCH_AND_16:
8199 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8200 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8201 ignore, BUILT_IN_NONE);
8202 if (target)
8203 return target;
8204 break;
8205
8206 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8207 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8208 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8209 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8210 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8211 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8212 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8213 ignore, BUILT_IN_NONE);
8214 if (target)
8215 return target;
8216 break;
8217
8218 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8219 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8220 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8221 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8222 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8223 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8224 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8225 ignore, BUILT_IN_NONE);
8226 if (target)
8227 return target;
8228 break;
8229
8230 case BUILT_IN_ATOMIC_FETCH_OR_1:
8231 case BUILT_IN_ATOMIC_FETCH_OR_2:
8232 case BUILT_IN_ATOMIC_FETCH_OR_4:
8233 case BUILT_IN_ATOMIC_FETCH_OR_8:
8234 case BUILT_IN_ATOMIC_FETCH_OR_16:
8235 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8236 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8237 ignore, BUILT_IN_NONE);
8238 if (target)
8239 return target;
8240 break;
10b744a3 8241
8242 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 8243 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 8244
8245 case BUILT_IN_ATOMIC_CLEAR:
8246 return expand_builtin_atomic_clear (exp);
1cd6e20d 8247
8248 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8249 return expand_builtin_atomic_always_lock_free (exp);
8250
8251 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8252 target = expand_builtin_atomic_is_lock_free (exp);
8253 if (target)
8254 return target;
8255 break;
8256
8257 case BUILT_IN_ATOMIC_THREAD_FENCE:
8258 expand_builtin_atomic_thread_fence (exp);
8259 return const0_rtx;
8260
8261 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8262 expand_builtin_atomic_signal_fence (exp);
8263 return const0_rtx;
8264
0a39fd54 8265 case BUILT_IN_OBJECT_SIZE:
8266 return expand_builtin_object_size (exp);
8267
8268 case BUILT_IN_MEMCPY_CHK:
8269 case BUILT_IN_MEMPCPY_CHK:
8270 case BUILT_IN_MEMMOVE_CHK:
8271 case BUILT_IN_MEMSET_CHK:
8272 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8273 if (target)
8274 return target;
8275 break;
8276
8277 case BUILT_IN_STRCPY_CHK:
8278 case BUILT_IN_STPCPY_CHK:
8279 case BUILT_IN_STRNCPY_CHK:
1063acde 8280 case BUILT_IN_STPNCPY_CHK:
0a39fd54 8281 case BUILT_IN_STRCAT_CHK:
b356dfef 8282 case BUILT_IN_STRNCAT_CHK:
0a39fd54 8283 case BUILT_IN_SNPRINTF_CHK:
8284 case BUILT_IN_VSNPRINTF_CHK:
8285 maybe_emit_chk_warning (exp, fcode);
8286 break;
8287
8288 case BUILT_IN_SPRINTF_CHK:
8289 case BUILT_IN_VSPRINTF_CHK:
8290 maybe_emit_sprintf_chk_warning (exp, fcode);
8291 break;
8292
2c281b15 8293 case BUILT_IN_FREE:
f74ea1c2 8294 if (warn_free_nonheap_object)
8295 maybe_emit_free_warning (exp);
2c281b15 8296 break;
8297
badaa04c 8298 case BUILT_IN_THREAD_POINTER:
8299 return expand_builtin_thread_pointer (exp, target);
8300
8301 case BUILT_IN_SET_THREAD_POINTER:
8302 expand_builtin_set_thread_pointer (exp);
8303 return const0_rtx;
8304
ca4c3545 8305 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 8306 /* Do library call, if we failed to expand the builtin when
8307 folding. */
ca4c3545 8308 break;
8309
a7babc1e 8310 case BUILT_IN_GOACC_PARLEVEL_ID:
8311 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8312 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8313
123081ef 8314 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8315 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8316
8317 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8318 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8319 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8320 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8321 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8322 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8323 return expand_speculation_safe_value (mode, exp, target, ignore);
8324
92482ee0 8325 default: /* just do library call, if unknown builtin */
146c1b4f 8326 break;
53800dbe 8327 }
8328
8329 /* The switch statement above can drop through to cause the function
8330 to be called normally. */
8331 return expand_call (exp, target, ignore);
8332}
650e4c94 8333
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.

   T is not modified; every argument is checked against the type of the
   corresponding parameter in FNDECL's prototype, class by class
   (scalar float, complex float, pointer, integral).  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  /* Only "normal" (i.e. not machine-specific or frontend-specific)
     built-ins are recognized here.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  /* Walk the prototype's parameter type list and the call's actual
     arguments in lock step.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Excess actual arguments mean the call doesn't match the
	     prototype.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Each parameter/argument pair must agree on its broad type
	 class; anything outside these four classes is rejected.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
8401
c2f47e15 8402/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8403 evaluate to a constant. */
650e4c94 8404
8405static tree
c2f47e15 8406fold_builtin_constant_p (tree arg)
650e4c94 8407{
650e4c94 8408 /* We return 1 for a numeric type that's known to be a constant
8409 value at compile-time or for an aggregate type that's a
8410 literal constant. */
c2f47e15 8411 STRIP_NOPS (arg);
650e4c94 8412
8413 /* If we know this is a constant, emit the constant of one. */
c2f47e15 8414 if (CONSTANT_CLASS_P (arg)
8415 || (TREE_CODE (arg) == CONSTRUCTOR
8416 && TREE_CONSTANT (arg)))
650e4c94 8417 return integer_one_node;
c2f47e15 8418 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 8419 {
c2f47e15 8420 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 8421 if (TREE_CODE (op) == STRING_CST
8422 || (TREE_CODE (op) == ARRAY_REF
8423 && integer_zerop (TREE_OPERAND (op, 1))
8424 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8425 return integer_one_node;
8426 }
650e4c94 8427
1fb4300c 8428 /* If this expression has side effects, show we don't know it to be a
8429 constant. Likewise if it's a pointer or aggregate type since in
8430 those case we only want literals, since those are only optimized
f97c71a1 8431 when generating RTL, not later.
8432 And finally, if we are compiling an initializer, not code, we
8433 need to return a definite result now; there's not going to be any
8434 more optimization done. */
c2f47e15 8435 if (TREE_SIDE_EFFECTS (arg)
8436 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8437 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 8438 || cfun == 0
0b049e15 8439 || folding_initializer
8440 || force_folding_builtin_constant_p)
650e4c94 8441 return integer_zero_node;
8442
c2f47e15 8443 return NULL_TREE;
650e4c94 8444}
8445
01107f42 8446/* Create builtin_expect or builtin_expect_with_probability
8447 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8448 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8449 builtin_expect_with_probability instead uses third argument as PROBABILITY
8450 value. */
4ee9c684 8451
8452static tree
c83059be 8453build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
01107f42 8454 tree predictor, tree probability)
4ee9c684 8455{
76f5a783 8456 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 8457
01107f42 8458 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8459 : BUILT_IN_EXPECT_WITH_PROBABILITY);
76f5a783 8460 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8461 ret_type = TREE_TYPE (TREE_TYPE (fn));
8462 pred_type = TREE_VALUE (arg_types);
8463 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8464
389dd41b 8465 pred = fold_convert_loc (loc, pred_type, pred);
8466 expected = fold_convert_loc (loc, expected_type, expected);
01107f42 8467
8468 if (probability)
8469 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8470 else
8471 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8472 predictor);
76f5a783 8473
8474 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8475 build_int_cst (ret_type, 0));
8476}
8477
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.

   ARG0 is the expression whose value is expected, ARG1 the expected
   value; ARG2 is an optional predictor and ARG3 an optional probability
   (for __builtin_expect_with_probability), both forwarded unchanged to
   any nested predicates this folding builds.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; save it so it is evaluated once.  */
      arg1 = save_expr (arg1);

      /* Push the expectation onto each operand of the short-circuit
	 operator, then rebuild the operator over the two predicates.  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl; an address is only a known
	 constant if that decl is not weak (a weak symbol may be 0).  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
8550
c2f47e15 8551/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 8552
539a3a92 8553static tree
c2f47e15 8554fold_builtin_classify_type (tree arg)
539a3a92 8555{
c2f47e15 8556 if (arg == 0)
7002a1c8 8557 return build_int_cst (integer_type_node, no_type_class);
539a3a92 8558
7002a1c8 8559 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 8560}
8561
/* Fold a call to __builtin_strlen with argument ARG.  TYPE is the
   expected result type.  Returns the folded length, or NULL_TREE when
   the length is not a compile-time constant; in the latter case a
   second, deeper c_strlen query is used purely to diagnose unterminated
   arrays.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data lendata = { };
      tree len = c_strlen (arg, 0, &lendata);

      if (len)
	return fold_convert_loc (loc, type, len);

      /* No constant length; retry with ONLY_VALUE == 1 so c_strlen
	 records the decl of an unterminated array, if any.  */
      if (!lendata.decl)
	c_strlen (arg, 1, &lendata);

      if (lendata.decl)
	{
	  /* Point the diagnostic at the argument when possible.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
	}

      return NULL_TREE;
    }
}
8592
92c43e3c 8593/* Fold a call to __builtin_inf or __builtin_huge_val. */
8594
8595static tree
389dd41b 8596fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 8597{
aa870c1b 8598 REAL_VALUE_TYPE real;
8599
40f4dbd5 8600 /* __builtin_inff is intended to be usable to define INFINITY on all
8601 targets. If an infinity is not available, INFINITY expands "to a
8602 positive constant of type float that overflows at translation
8603 time", footnote "In this case, using INFINITY will violate the
8604 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8605 Thus we pedwarn to ensure this constraint violation is
8606 diagnosed. */
92c43e3c 8607 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 8608 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 8609
aa870c1b 8610 real_inf (&real);
8611 return build_real (type, real);
92c43e3c 8612}
8613
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.

   ARG0 is the angle; ARG1 and ARG2 are the sin/cos output pointers.
   The call is canonicalized to cexpi (or folded to a constant complex
   value), whose imaginary part is the sine and real part the
   cosine.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      /* Constant angle: try to fold cexpi (arg0) at compile time.  */
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Otherwise emit a real cexpi call, which requires C99 complex
	 math support in the target's libc.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* CALL is referenced twice below; evaluate it only once.  */
      call = builtin_save_expr (call);
    }

  /* Store IMAGPART (sine) through ARG1 and REALPART (cosine) through
     ARG2, sequenced with a COMPOUND_EXPR.  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
8663
c2f47e15 8664/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8665 Return NULL_TREE if no simplification can be made. */
9c8a1629 8666
8667static tree
389dd41b 8668fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8669{
c2f47e15 8670 if (!validate_arg (arg1, POINTER_TYPE)
8671 || !validate_arg (arg2, POINTER_TYPE)
8672 || !validate_arg (len, INTEGER_TYPE))
8673 return NULL_TREE;
9c8a1629 8674
8675 /* If the LEN parameter is zero, return zero. */
8676 if (integer_zerop (len))
389dd41b 8677 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8678 arg1, arg2);
9c8a1629 8679
8680 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8681 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8682 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8683
c4fef134 8684 /* If len parameter is one, return an expression corresponding to
8685 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
e913b5cd 8686 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8687 {
8688 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8689 tree cst_uchar_ptr_node
8690 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8691
389dd41b 8692 tree ind1
8693 = fold_convert_loc (loc, integer_type_node,
8694 build1 (INDIRECT_REF, cst_uchar_node,
8695 fold_convert_loc (loc,
8696 cst_uchar_ptr_node,
c4fef134 8697 arg1)));
389dd41b 8698 tree ind2
8699 = fold_convert_loc (loc, integer_type_node,
8700 build1 (INDIRECT_REF, cst_uchar_node,
8701 fold_convert_loc (loc,
8702 cst_uchar_ptr_node,
c4fef134 8703 arg2)));
389dd41b 8704 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8705 }
9c8a1629 8706
c2f47e15 8707 return NULL_TREE;
9c8a1629 8708}
8709
c2f47e15 8710/* Fold a call to builtin isascii with argument ARG. */
d49367d4 8711
8712static tree
389dd41b 8713fold_builtin_isascii (location_t loc, tree arg)
d49367d4 8714{
c2f47e15 8715 if (!validate_arg (arg, INTEGER_TYPE))
8716 return NULL_TREE;
d49367d4 8717 else
8718 {
8719 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8720 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8721 build_int_cst (integer_type_node,
c90b5d40 8722 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8723 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8724 arg, integer_zero_node);
d49367d4 8725 }
8726}
8727
c2f47e15 8728/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8729
8730static tree
389dd41b 8731fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8732{
c2f47e15 8733 if (!validate_arg (arg, INTEGER_TYPE))
8734 return NULL_TREE;
48e1416a 8735
c2f47e15 8736 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8737 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8738 build_int_cst (integer_type_node, 0x7f));
d49367d4 8739}
8740
c2f47e15 8741/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8742
8743static tree
389dd41b 8744fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8745{
c2f47e15 8746 if (!validate_arg (arg, INTEGER_TYPE))
8747 return NULL_TREE;
df1cf42e 8748 else
8749 {
8750 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8751 /* According to the C standard, isdigit is unaffected by locale.
8752 However, it definitely is affected by the target character set. */
624d37a6 8753 unsigned HOST_WIDE_INT target_digit0
8754 = lang_hooks.to_target_charset ('0');
8755
8756 if (target_digit0 == 0)
8757 return NULL_TREE;
8758
389dd41b 8759 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8760 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8761 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8762 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8763 build_int_cst (unsigned_type_node, 9));
df1cf42e 8764 }
8765}
27f261ef 8766
c2f47e15 8767/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8768
8769static tree
389dd41b 8770fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8771{
c2f47e15 8772 if (!validate_arg (arg, REAL_TYPE))
8773 return NULL_TREE;
d1aade50 8774
389dd41b 8775 arg = fold_convert_loc (loc, type, arg);
389dd41b 8776 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8777}
8778
c2f47e15 8779/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8780
8781static tree
389dd41b 8782fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8783{
c2f47e15 8784 if (!validate_arg (arg, INTEGER_TYPE))
8785 return NULL_TREE;
d1aade50 8786
389dd41b 8787 arg = fold_convert_loc (loc, type, arg);
389dd41b 8788 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8789}
8790
abe4dcf6 8791/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8792
8793static tree
389dd41b 8794fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8795{
239d491a 8796 if (validate_arg (arg, COMPLEX_TYPE)
8797 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8798 {
8799 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8800
abe4dcf6 8801 if (atan2_fn)
8802 {
c2f47e15 8803 tree new_arg = builtin_save_expr (arg);
389dd41b 8804 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8805 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8806 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8807 }
8808 }
48e1416a 8809
abe4dcf6 8810 return NULL_TREE;
8811}
8812
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a constant argument that did not overflow at parse
     time.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8868
ebf8b4f5 8869/* Fold a call to builtin modf. */
8870
8871static tree
389dd41b 8872fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 8873{
8874 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8875 return NULL_TREE;
48e1416a 8876
ebf8b4f5 8877 STRIP_NOPS (arg0);
48e1416a 8878
ebf8b4f5 8879 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8880 return NULL_TREE;
48e1416a 8881
389dd41b 8882 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 8883
8884 /* Proceed if a valid pointer type was passed in. */
8885 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8886 {
8887 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8888 REAL_VALUE_TYPE trunc, frac;
8889
8890 switch (value->cl)
8891 {
8892 case rvc_nan:
8893 case rvc_zero:
8894 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8895 trunc = frac = *value;
8896 break;
8897 case rvc_inf:
8898 /* For +-Inf, return (*arg1 = arg0, +-0). */
8899 frac = dconst0;
8900 frac.sign = value->sign;
8901 trunc = *value;
8902 break;
8903 case rvc_normal:
8904 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8905 real_trunc (&trunc, VOIDmode, value);
8906 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8907 /* If the original number was negative and already
8908 integral, then the fractional part is -0.0. */
8909 if (value->sign && frac.cl == rvc_zero)
8910 frac.sign = value->sign;
8911 break;
8912 }
48e1416a 8913
ebf8b4f5 8914 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8915 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 8916 build_real (rettype, trunc));
8917 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8918 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 8919 build_real (rettype, frac));
8920 }
48e1416a 8921
ebf8b4f5 8922 return NULL_TREE;
8923}
8924
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If there is an RTL expander for this function, prefer it over the
     generic expansions below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Composite modes are the IBM extended (double-double) formats,
     which need special treatment throughout.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* R becomes the largest finite value of MODE.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is referenced more than once below; evaluate once.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* RMAX is the largest finite value, RMIN the smallest normal
	   value (0x1p<emin-1>) of the original mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9083
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities the result is always false, but ARG may
	 have side effects, so keep its evaluation.  */
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is used by both inner calls; evaluate it only once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 booleans.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities every value is finite.  */
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9162
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All tests below are on the magnitude; evaluate |arg| once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p<emin-1> is the smallest normal value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects RES for non-NaN, FP_NAN otherwise.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9234
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* NOTE(review): CMP_TYPE stays NULL_TREE when neither operand is a
     REAL_TYPE; presumably the front ends guarantee at least one real
     operand before calling here -- confirm against callers.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered: without NaNs the result is always 0,
	 but the operands must still be evaluated.  */
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes express the opposite of the desired result, hence the
     TRUTH_NOT_EXPR around the comparison.  */
  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9283
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Map the built-in to the internal function and, for the _p
     variants, record that only the overflow flag is wanted.  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  /* Build IFN returning a complex value: REALPART is the arithmetic
     result, IMAGPART the overflow flag.  */
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Non-_p variants also store the result through ARG2.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
9383
c388a0cf 9384/* Fold a call to __builtin_FILE to a constant string. */
9385
9386static inline tree
9387fold_builtin_FILE (location_t loc)
9388{
9389 if (const char *fname = LOCATION_FILE (loc))
859b51f8 9390 {
9391 /* The documentation says this builtin is equivalent to the preprocessor
9392 __FILE__ macro so it appears appropriate to use the same file prefix
9393 mappings. */
9394 fname = remap_macro_filename (fname);
c388a0cf 9395 return build_string_literal (strlen (fname) + 1, fname);
859b51f8 9396 }
c388a0cf 9397
9398 return build_string_literal (1, "");
9399}
9400
9401/* Fold a call to __builtin_FUNCTION to a constant string. */
9402
9403static inline tree
9404fold_builtin_FUNCTION ()
9405{
c2d38635 9406 const char *name = "";
9407
c388a0cf 9408 if (current_function_decl)
c2d38635 9409 name = lang_hooks.decl_printable_name (current_function_decl, 0);
c388a0cf 9410
c2d38635 9411 return build_string_literal (strlen (name) + 1, name);
c388a0cf 9412}
9413
9414/* Fold a call to __builtin_LINE to an integer constant. */
9415
9416static inline tree
9417fold_builtin_LINE (location_t loc, tree type)
9418{
9419 return build_int_cst (type, LOCATION_LINE (loc));
9420}
9421
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* __builtin_inf warns if the mode has no infinity (last arg).  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      /* HUGE_VAL never warns; it is allowed to be a finite maximum.  */
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* Zero-argument classify_type means "no argument".  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
7c2f0500 9460
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try full constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	/* Try the mode-based shortcut first, then the generic
	   interclass expansion.  */
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
728bac60 9587
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Do not attempt to fold erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      /* Two-argument form: no explicit probability argument.  */
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The is* comparison builtins fold to an unordered comparison
       (quiet on NaN) paired with the corresponding ordered code.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9669
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Do not attempt to fold erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
    break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      /* Three-argument form carries a predictor argument in ARG2.  */
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      /* Here ARG2 is a probability, not a predictor.  */
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All of the checked arithmetic-overflow builtins share one
       folder, which dispatches on FCODE.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
650e4c94 9743
c2f47e15 9744/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9745 arguments. IGNORE is true if the result of the
9746 function call is ignored. This function returns NULL_TREE if no
9747 simplification was possible. */
48e1416a 9748
2165588a 9749tree
e80cc485 9750fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9751{
9752 tree ret = NULL_TREE;
a7f5bb2d 9753
c2f47e15 9754 switch (nargs)
9755 {
9756 case 0:
e80cc485 9757 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9758 break;
9759 case 1:
e80cc485 9760 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9761 break;
9762 case 2:
e80cc485 9763 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9764 break;
9765 case 3:
e80cc485 9766 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9767 break;
c2f47e15 9768 default:
12f08300 9769 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9770 break;
9771 }
9772 if (ret)
9773 {
75a70cf9 9774 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9775 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9776 return ret;
9777 }
9778 return NULL_TREE;
9779}
9780
0e80b01d 9781/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9782 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9783 of arguments in ARGS to be omitted. OLDNARGS is the number of
9784 elements in ARGS. */
c2f47e15 9785
9786static tree
0e80b01d 9787rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9788 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9789{
0e80b01d 9790 int nargs = oldnargs - skip + n;
9791 tree *buffer;
c2f47e15 9792
0e80b01d 9793 if (n > 0)
c2f47e15 9794 {
0e80b01d 9795 int i, j;
c2f47e15 9796
0e80b01d 9797 buffer = XALLOCAVEC (tree, nargs);
9798 for (i = 0; i < n; i++)
9799 buffer[i] = va_arg (newargs, tree);
9800 for (j = skip; j < oldnargs; j++, i++)
9801 buffer[i] = args[j];
9802 }
9803 else
9804 buffer = args + skip;
19fbe3a4 9805
0e80b01d 9806 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9807}
c2f47e15 9808
198622c0 9809/* Return true if FNDECL shouldn't be folded right now.
9810 If a built-in function has an inline attribute always_inline
9811 wrapper, defer folding it after always_inline functions have
9812 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9813 might not be performed. */
9814
51d2c51e 9815bool
198622c0 9816avoid_folding_inline_builtin (tree fndecl)
9817{
9818 return (DECL_DECLARED_INLINE_P (fndecl)
9819 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9820 && cfun
9821 && !cfun->always_inline_functions_inlined
9822 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9823}
9824
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   EXP is the CALL_EXPR to fold; IGNORE is true when the call's result
   is not used.  Returns the folded tree or NULL_TREE if folding was
   not possible or had to be deferred.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* always_inline fortify wrappers must be inlined before their
	 builtin bodies may be folded (see avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-dependent builtins are folded by the target hook.  */
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
48e1416a 9868
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.

   The unnamed tree parameter (the call's type) is currently unused.
   This mirrors the deferral logic of fold_call_expr, but works on a
   bare function expression and argument array instead of a CALL_EXPR.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls (through an ADDR_EXPR of a decl) can be folded.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      /* always_inline fortify wrappers must be inlined first.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-dependent builtins go through the target hook.  */
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9904
af1409ad 9905/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9906 along with N new arguments specified as the "..." parameters. SKIP
9907 is the number of arguments in EXP to be omitted. This function is used
9908 to do varargs-to-varargs transformations. */
9909
9910static tree
9911rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9912{
9913 va_list ap;
9914 tree t;
9915
9916 va_start (ap, n);
9917 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9918 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9919 va_end (ap);
c2f47e15 9920
af1409ad 9921 return t;
c2f47e15 9922}
9923
9924/* Validate a single argument ARG against a tree code CODE representing
184fac50 9925 a type. Return true when argument is valid. */
48e1416a 9926
c2f47e15 9927static bool
184fac50 9928validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9929{
9930 if (!arg)
9931 return false;
9932 else if (code == POINTER_TYPE)
184fac50 9933 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9934 else if (code == INTEGER_TYPE)
9935 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9936 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9937}
0eb671f7 9938
75a70cf9 9939/* This function validates the types of a function call argument list
9940 against a specified list of tree_codes. If the last specifier is a 0,
9941 that represents an ellipses, otherwise the last specifier must be a
9942 VOID_TYPE.
9943
9944 This is the GIMPLE version of validate_arglist. Eventually we want to
9945 completely convert builtins.c to work from GIMPLEs and the tree based
9946 validate_arglist will then be removed. */
9947
9948bool
1a91d914 9949validate_gimple_arglist (const gcall *call, ...)
75a70cf9 9950{
9951 enum tree_code code;
9952 bool res = 0;
9953 va_list ap;
9954 const_tree arg;
9955 size_t i;
9956
9957 va_start (ap, call);
9958 i = 0;
9959
9960 do
9961 {
d62e827b 9962 code = (enum tree_code) va_arg (ap, int);
75a70cf9 9963 switch (code)
9964 {
9965 case 0:
9966 /* This signifies an ellipses, any further arguments are all ok. */
9967 res = true;
9968 goto end;
9969 case VOID_TYPE:
9970 /* This signifies an endlink, if no arguments remain, return
9971 true, otherwise return false. */
9972 res = (i == gimple_call_num_args (call));
9973 goto end;
9974 default:
9975 /* If no parameters remain or the parameter's code does not
9976 match the specified code, return false. Otherwise continue
9977 checking any remaining arguments. */
9978 arg = gimple_call_arg (call, i++);
9979 if (!validate_arg (arg, code))
9980 goto end;
9981 break;
9982 }
9983 }
9984 while (1);
9985
9986 /* We need gotos here since we can only have one VA_CLOSE in a
9987 function. */
9988 end: ;
9989 va_end (ap);
9990
9991 return res;
9992}
9993
/* Default target-specific builtin expander that does nothing.
   Returning NULL_RTX tells the caller to emit a normal library call
   instead of an expanded sequence.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
c7926a82 10005
01537105 10006/* Returns true is EXP represents data that would potentially reside
10007 in a readonly section. */
10008
b9ea678c 10009bool
01537105 10010readonly_data_expr (tree exp)
10011{
10012 STRIP_NOPS (exp);
10013
9ff0637e 10014 if (TREE_CODE (exp) != ADDR_EXPR)
10015 return false;
10016
10017 exp = get_base_address (TREE_OPERAND (exp, 0));
10018 if (!exp)
10019 return false;
10020
10021 /* Make sure we call decl_readonly_section only for trees it
10022 can handle (since it returns true for everything it doesn't
10023 understand). */
491e04ef 10024 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10025 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 10026 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 10027 return decl_readonly_section (exp, 0);
01537105 10028 else
10029 return false;
10030}
4ee9c684 10031
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept-set S2 must be a known string constant to fold.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings known: compute the result at compile time
	     using the host strpbrk.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
10097
c2f47e15 10098/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10099 to the call.
4ee9c684 10100
c2f47e15 10101 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10102 simplified form of the call as a tree.
10103
10104 The simplified form may be a constant or other expression which
10105 computes the same value, but in a more efficient manner (including
10106 calls to other builtin functions).
10107
10108 The call may contain arguments which need to be evaluated, but
10109 which are not useful to determine the result of the call. In
10110 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10111 COMPOUND_EXPR will be an argument which must be evaluated.
10112 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10113 COMPOUND_EXPR in the chain will contain the tree for the simplified
10114 form of the builtin function call. */
10115
10116static tree
389dd41b 10117fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 10118{
c2f47e15 10119 if (!validate_arg (s1, POINTER_TYPE)
10120 || !validate_arg (s2, POINTER_TYPE))
10121 return NULL_TREE;
4ee9c684 10122 else
10123 {
4ee9c684 10124 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10125
c2f47e15 10126 /* If either argument is "", return NULL_TREE. */
4ee9c684 10127 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 10128 /* Evaluate and ignore both arguments in case either one has
10129 side-effects. */
389dd41b 10130 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 10131 s1, s2);
c2f47e15 10132 return NULL_TREE;
4ee9c684 10133 }
10134}
10135
c2f47e15 10136/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10137 to the call.
4ee9c684 10138
c2f47e15 10139 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10140 simplified form of the call as a tree.
10141
10142 The simplified form may be a constant or other expression which
10143 computes the same value, but in a more efficient manner (including
10144 calls to other builtin functions).
10145
10146 The call may contain arguments which need to be evaluated, but
10147 which are not useful to determine the result of the call. In
10148 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10149 COMPOUND_EXPR will be an argument which must be evaluated.
10150 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10151 COMPOUND_EXPR in the chain will contain the tree for the simplified
10152 form of the builtin function call. */
10153
10154static tree
389dd41b 10155fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 10156{
c2f47e15 10157 if (!validate_arg (s1, POINTER_TYPE)
10158 || !validate_arg (s2, POINTER_TYPE))
10159 return NULL_TREE;
4ee9c684 10160 else
10161 {
c2f47e15 10162 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 10163 const char *p1 = c_getstr (s1);
4ee9c684 10164 if (p1 && *p1 == '\0')
10165 {
10166 /* Evaluate and ignore argument s2 in case it has
10167 side-effects. */
389dd41b 10168 return omit_one_operand_loc (loc, size_type_node,
39761420 10169 size_zero_node, s2);
4ee9c684 10170 }
10171
10172 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 10173 const char *p2 = c_getstr (s2);
4ee9c684 10174 if (p2 && *p2 == '\0')
10175 {
b9a16870 10176 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 10177
10178 /* If the replacement _DECL isn't initialized, don't do the
10179 transformation. */
10180 if (!fn)
c2f47e15 10181 return NULL_TREE;
4ee9c684 10182
389dd41b 10183 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 10184 }
c2f47e15 10185 return NULL_TREE;
4ee9c684 10186 }
10187}
10188
c2f47e15 10189/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 10190 produced. False otherwise. This is done so that we don't output the error
10191 or warning twice or three times. */
75a70cf9 10192
743b0c6a 10193bool
c2f47e15 10194fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 10195{
10196 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 10197 int nargs = call_expr_nargs (exp);
10198 tree arg;
d98fd4a4 10199 /* There is good chance the current input_location points inside the
10200 definition of the va_start macro (perhaps on the token for
10201 builtin) in a system header, so warnings will not be emitted.
10202 Use the location in real source code. */
be1e7283 10203 location_t current_location =
d98fd4a4 10204 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10205 NULL);
4ee9c684 10206
257d99c3 10207 if (!stdarg_p (fntype))
743b0c6a 10208 {
10209 error ("%<va_start%> used in function with fixed args");
10210 return true;
10211 }
c2f47e15 10212
10213 if (va_start_p)
79012a9d 10214 {
c2f47e15 10215 if (va_start_p && (nargs != 2))
10216 {
10217 error ("wrong number of arguments to function %<va_start%>");
10218 return true;
10219 }
10220 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 10221 }
10222 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10223 when we checked the arguments and if needed issued a warning. */
c2f47e15 10224 else
4ee9c684 10225 {
c2f47e15 10226 if (nargs == 0)
10227 {
10228 /* Evidently an out of date version of <stdarg.h>; can't validate
10229 va_start's second argument, but can still work as intended. */
d98fd4a4 10230 warning_at (current_location,
7edb1062 10231 OPT_Wvarargs,
10232 "%<__builtin_next_arg%> called without an argument");
c2f47e15 10233 return true;
10234 }
10235 else if (nargs > 1)
a0c938f0 10236 {
c2f47e15 10237 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 10238 return true;
10239 }
c2f47e15 10240 arg = CALL_EXPR_ARG (exp, 0);
10241 }
10242
a8dd994c 10243 if (TREE_CODE (arg) == SSA_NAME)
10244 arg = SSA_NAME_VAR (arg);
10245
c2f47e15 10246 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 10247 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 10248 the arguments and if needed issuing a warning. */
10249 if (!integer_zerop (arg))
10250 {
10251 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 10252
4ee9c684 10253 /* Strip off all nops for the sake of the comparison. This
10254 is not quite the same as STRIP_NOPS. It does more.
10255 We must also strip off INDIRECT_EXPR for C++ reference
10256 parameters. */
72dd6141 10257 while (CONVERT_EXPR_P (arg)
4ee9c684 10258 || TREE_CODE (arg) == INDIRECT_REF)
10259 arg = TREE_OPERAND (arg, 0);
10260 if (arg != last_parm)
a0c938f0 10261 {
b08cf617 10262 /* FIXME: Sometimes with the tree optimizers we can get the
10263 not the last argument even though the user used the last
10264 argument. We just warn and set the arg to be the last
10265 argument so that we will get wrong-code because of
10266 it. */
d98fd4a4 10267 warning_at (current_location,
7edb1062 10268 OPT_Wvarargs,
d98fd4a4 10269 "second parameter of %<va_start%> not last named argument");
743b0c6a 10270 }
24158ad7 10271
10272 /* Undefined by C99 7.15.1.4p4 (va_start):
10273 "If the parameter parmN is declared with the register storage
10274 class, with a function or array type, or with a type that is
10275 not compatible with the type that results after application of
10276 the default argument promotions, the behavior is undefined."
10277 */
10278 else if (DECL_REGISTER (arg))
d98fd4a4 10279 {
10280 warning_at (current_location,
7edb1062 10281 OPT_Wvarargs,
67cf9b55 10282 "undefined behavior when second parameter of "
d98fd4a4 10283 "%<va_start%> is declared with %<register%> storage");
10284 }
24158ad7 10285
79012a9d 10286 /* We want to verify the second parameter just once before the tree
a0c938f0 10287 optimizers are run and then avoid keeping it in the tree,
10288 as otherwise we could warn even for correct code like:
10289 void foo (int i, ...)
10290 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 10291 if (va_start_p)
10292 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10293 else
10294 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 10295 }
10296 return false;
4ee9c684 10297}
10298
10299
/* Expand a call EXP to __builtin_object_size.
   Emits an error and a trap for malformed calls; otherwise returns the
   worst-case fallback constant for the requested OBJECT_SIZE_TYPE:
   (size_t)-1 for maximum-size queries (types 0 and 1) and 0 for
   minimum-size queries (types 2 and 3).  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
10334
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* For __memset_chk the second argument is the fill value, not a
     pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses; SIZES_OK is false when an
     overflow was detected.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Replace the checking call with the unchecked variant,
	 preserving its tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10451
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the CALL_EXPR for a call to one of the object-size-checking
   built-ins (__strcpy_chk, __strncat_chk, __snprintf_chk, ...)
   identified by FCODE.  Dissects the call's arguments and hands them
   to check_access / check_strncat_sizes for the actual diagnostics.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  /* Pick the call's arguments apart according to each built-in's
     signature.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
10524
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the CALL_EXPR and
   FCODE distinguishes the two built-ins.  The output length is only
   computable when the format string is a literal with no conversions,
   or (for __sprintf_chk) is exactly "%s" with a known-length string
   argument.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A size of all ones means the object size was not determined;
     there is nothing to check against in that case.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
10582
2c281b15 10583/* Emit warning if a free is called with address of a variable. */
10584
10585static void
10586maybe_emit_free_warning (tree exp)
10587{
10588 tree arg = CALL_EXPR_ARG (exp, 0);
10589
10590 STRIP_NOPS (arg);
10591 if (TREE_CODE (arg) != ADDR_EXPR)
10592 return;
10593
10594 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 10595 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 10596 return;
10597
10598 if (SSA_VAR_P (arg))
f74ea1c2 10599 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10600 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 10601 else
f74ea1c2 10602 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10603 "%Kattempt to free a non-heap object", exp);
2c281b15 10604}
10605
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  PTR is the pointer being queried and OST the requested
   object-size type (an integer constant 0 through 3).  Return the size
   as a size_type_node constant, or NULL_TREE when it cannot (yet) be
   determined.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a constant in the range [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
10652
12f08300 10653/* Builtins with folding operations that operate on "..." arguments
10654 need special handling; we need to store the arguments in a convenient
10655 data structure before attempting any folding. Fortunately there are
10656 only a few builtins that fall into this category. FNDECL is the
10657 function, EXP is the CALL_EXPR for the call. */
10658
10659static tree
10660fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10661{
10662 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10663 tree ret = NULL_TREE;
10664
10665 switch (fcode)
10666 {
10667 case BUILT_IN_FPCLASSIFY:
10668 ret = fold_builtin_fpclassify (loc, args, nargs);
10669 break;
10670
10671 default:
10672 break;
10673 }
10674 if (ret)
10675 {
10676 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10677 SET_EXPR_LOCATION (ret, loc);
10678 TREE_NO_WARNING (ret) = 1;
10679 return ret;
10680 }
10681 return NULL_TREE;
10682}
10683
99eabcc1 10684/* Initialize format string characters in the target charset. */
10685
b9ea678c 10686bool
99eabcc1 10687init_target_chars (void)
10688{
10689 static bool init;
10690 if (!init)
10691 {
10692 target_newline = lang_hooks.to_target_charset ('\n');
10693 target_percent = lang_hooks.to_target_charset ('%');
10694 target_c = lang_hooks.to_target_charset ('c');
10695 target_s = lang_hooks.to_target_charset ('s');
10696 if (target_newline == 0 || target_percent == 0 || target_c == 0
10697 || target_s == 0)
10698 return false;
10699
10700 target_percent_c[0] = target_percent;
10701 target_percent_c[1] = target_c;
10702 target_percent_c[2] = '\0';
10703
10704 target_percent_s[0] = target_percent;
10705 target_percent_s[1] = target_s;
10706 target_percent_s[2] = '\0';
10707
10708 target_percent_s_newline[0] = target_percent;
10709 target_percent_s_newline[1] = target_s;
10710 target_percent_s_newline[2] = target_newline;
10711 target_percent_s_newline[3] = '\0';
a0c938f0 10712
99eabcc1 10713 init = true;
10714 }
10715 return true;
10716}
bffb7645 10717
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
	  REAL_VALUE_TYPE rmode;

	  /* Round to TYPE's precision and make sure nothing was lost.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10754
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the (complex) tree type for
   the result; its element type is TREE_TYPE (TYPE).  This function
   assumes that you cleared the MPFR flags and then calculated M to
   see if anything subsequently set a flag prior to entering this
   function.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
   is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert the real and imaginary parts separately.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
239d491a 10801
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Return NULL_TREE when the arguments are
   not finite real constants or the result cannot be represented
   exactly.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
e84da7c1 10875
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Return NULL_TREE when the checks fail.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
75a70cf9 10940
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Return NULL_TREE when folding
   is not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear the MPFR exception flags so do_mpc_ckconv can tell
	     whether FUNC overflowed or underflowed.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
239d491a 10999
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the
   GIMPLE call to a built-in; IGNORE is true if the call's result is
   not used.  Return the folded replacement tree, or NULL_TREE if no
   folding was performed.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls using __builtin_va_arg_pack () are left unfolded.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* Use a dummy pointer when the call has no arguments.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
7bfefa9d 11049
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  /* NOTE(review): ffs additionally redirects the "ffs" libfunc when
     int is narrower than a word — presumably because expansion can
     fall back to a library call in that case; confirm against the
     ffs expander before relying on this.  */
  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
a6b74a67 11071
11072/* Return true if DECL is a builtin that expands to a constant or similarly
11073 simple code. */
11074bool
11075is_simple_builtin (tree decl)
11076{
a0e9bfbb 11077 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
a6b74a67 11078 switch (DECL_FUNCTION_CODE (decl))
11079 {
11080 /* Builtins that expand to constants. */
11081 case BUILT_IN_CONSTANT_P:
11082 case BUILT_IN_EXPECT:
11083 case BUILT_IN_OBJECT_SIZE:
11084 case BUILT_IN_UNREACHABLE:
11085 /* Simple register moves or loads from stack. */
fca0886c 11086 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 11087 case BUILT_IN_RETURN_ADDRESS:
11088 case BUILT_IN_EXTRACT_RETURN_ADDR:
11089 case BUILT_IN_FROB_RETURN_ADDR:
11090 case BUILT_IN_RETURN:
11091 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11092 case BUILT_IN_FRAME_ADDRESS:
11093 case BUILT_IN_VA_END:
11094 case BUILT_IN_STACK_SAVE:
11095 case BUILT_IN_STACK_RESTORE:
11096 /* Exception state returns or moves registers around. */
11097 case BUILT_IN_EH_FILTER:
11098 case BUILT_IN_EH_POINTER:
11099 case BUILT_IN_EH_COPY_VALUES:
11100 return true;
11101
11102 default:
11103 return false;
11104 }
11105
11106 return false;
11107}
11108
11109/* Return true if DECL is a builtin that is not expensive, i.e., they are
11110 most probably expanded inline into reasonably simple code. This is a
11111 superset of is_simple_builtin. */
11112bool
11113is_inexpensive_builtin (tree decl)
11114{
11115 if (!decl)
11116 return false;
11117 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11118 return true;
11119 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11120 switch (DECL_FUNCTION_CODE (decl))
11121 {
11122 case BUILT_IN_ABS:
2b34677f 11123 CASE_BUILT_IN_ALLOCA:
74bdbe96 11124 case BUILT_IN_BSWAP16:
a6b74a67 11125 case BUILT_IN_BSWAP32:
11126 case BUILT_IN_BSWAP64:
11127 case BUILT_IN_CLZ:
11128 case BUILT_IN_CLZIMAX:
11129 case BUILT_IN_CLZL:
11130 case BUILT_IN_CLZLL:
11131 case BUILT_IN_CTZ:
11132 case BUILT_IN_CTZIMAX:
11133 case BUILT_IN_CTZL:
11134 case BUILT_IN_CTZLL:
11135 case BUILT_IN_FFS:
11136 case BUILT_IN_FFSIMAX:
11137 case BUILT_IN_FFSL:
11138 case BUILT_IN_FFSLL:
11139 case BUILT_IN_IMAXABS:
11140 case BUILT_IN_FINITE:
11141 case BUILT_IN_FINITEF:
11142 case BUILT_IN_FINITEL:
11143 case BUILT_IN_FINITED32:
11144 case BUILT_IN_FINITED64:
11145 case BUILT_IN_FINITED128:
11146 case BUILT_IN_FPCLASSIFY:
11147 case BUILT_IN_ISFINITE:
11148 case BUILT_IN_ISINF_SIGN:
11149 case BUILT_IN_ISINF:
11150 case BUILT_IN_ISINFF:
11151 case BUILT_IN_ISINFL:
11152 case BUILT_IN_ISINFD32:
11153 case BUILT_IN_ISINFD64:
11154 case BUILT_IN_ISINFD128:
11155 case BUILT_IN_ISNAN:
11156 case BUILT_IN_ISNANF:
11157 case BUILT_IN_ISNANL:
11158 case BUILT_IN_ISNAND32:
11159 case BUILT_IN_ISNAND64:
11160 case BUILT_IN_ISNAND128:
11161 case BUILT_IN_ISNORMAL:
11162 case BUILT_IN_ISGREATER:
11163 case BUILT_IN_ISGREATEREQUAL:
11164 case BUILT_IN_ISLESS:
11165 case BUILT_IN_ISLESSEQUAL:
11166 case BUILT_IN_ISLESSGREATER:
11167 case BUILT_IN_ISUNORDERED:
11168 case BUILT_IN_VA_ARG_PACK:
11169 case BUILT_IN_VA_ARG_PACK_LEN:
11170 case BUILT_IN_VA_COPY:
11171 case BUILT_IN_TRAP:
11172 case BUILT_IN_SAVEREGS:
11173 case BUILT_IN_POPCOUNTL:
11174 case BUILT_IN_POPCOUNTLL:
11175 case BUILT_IN_POPCOUNTIMAX:
11176 case BUILT_IN_POPCOUNT:
11177 case BUILT_IN_PARITYL:
11178 case BUILT_IN_PARITYLL:
11179 case BUILT_IN_PARITYIMAX:
11180 case BUILT_IN_PARITY:
11181 case BUILT_IN_LABS:
11182 case BUILT_IN_LLABS:
11183 case BUILT_IN_PREFETCH:
ca4c3545 11184 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 11185 return true;
11186
11187 default:
11188 return is_simple_builtin (decl);
11189 }
11190
11191 return false;
11192}
507a998e 11193
11194/* Return true if T is a constant and the value cast to a target char
11195 can be represented by a host char.
11196 Store the casted char constant in *P if so. */
11197
11198bool
11199target_char_cst_p (tree t, char *p)
11200{
11201 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11202 return false;
11203
11204 *p = (char)tree_to_uhwi (t);
11205 return true;
11206}
e6a18b5a 11207
/* Return the maximum object size (currently the maximum value
   representable in ptrdiff_t, as a tree constant).  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}