]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
Allow automatics in equivalences
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
53800dbe 1/* Expand builtin functions.
fbd26352 2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
7c2ecb89 20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
a950155e 34#include "params.h"
7c29e30e 35#include "tm_p.h"
36#include "stringpool.h"
c296f633 37#include "tree-vrp.h"
7c29e30e 38#include "tree-ssanames.h"
39#include "expmed.h"
40#include "optabs.h"
7c29e30e 41#include "emit-rtl.h"
42#include "recog.h"
7c29e30e 43#include "diagnostic-core.h"
b20a8bb4 44#include "alias.h"
b20a8bb4 45#include "fold-const.h"
6c21be92 46#include "fold-const-call.h"
e6a18b5a 47#include "gimple-ssa-warn-restrict.h"
9ed99284 48#include "stor-layout.h"
49#include "calls.h"
50#include "varasm.h"
51#include "tree-object-size.h"
dae0b5cb 52#include "realmpfr.h"
94ea8568 53#include "cfgrtl.h"
53800dbe 54#include "except.h"
d53441c8 55#include "dojump.h"
56#include "explow.h"
d53441c8 57#include "stmt.h"
53800dbe 58#include "expr.h"
d8fc4d0b 59#include "libfuncs.h"
53800dbe 60#include "output.h"
61#include "typeclass.h"
63c62881 62#include "langhooks.h"
162719b3 63#include "value-prof.h"
3b9c3a16 64#include "builtins.h"
30a86690 65#include "stringpool.h"
66#include "attribs.h"
f9acf11a 67#include "asan.h"
1f24b8e9 68#include "internal-fn.h"
e3240774 69#include "case-cfn-macros.h"
732905bb 70#include "gimple-fold.h"
5aef8938 71#include "intl.h"
859b51f8 72#include "file-prefix-map.h" /* remap_macro_filename() */
a7babc1e 73#include "gomp-constants.h"
74#include "omp-general.h"
5383fb56 75
/* Per-target builtin state; switchable targets each get their own copy.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Expand each DEF_BUILTIN in builtins.def to the stringified enumerator,
   so built_in_names[] parallels the built_in_function enumeration.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

/* Forward declarations for the static helpers defined later in this
   file, grouped roughly by the builtin family they expand or fold.  */
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

/* Target execution-character-set values ('\n', '%', "%c", "%s", ...);
   presumably initialized elsewhere in this file -- not visible in this
   chunk, TODO confirm.  */
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
0a39fd54 196
/* Return true if NAME starts with one of the reserved builtin prefixes
   __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;
  return false;
}
4ee9c684 210
1c47b3e8 211/* Return true if NODE should be considered for inline expansion regardless
212 of the optimization level. This means whenever a function is invoked with
213 its "internal" name, which normally contains the prefix "__builtin". */
214
ae62deea 215bool
1c47b3e8 216called_as_built_in (tree node)
217{
218 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
219 we want the name used to call the function, not the name it
220 will have. */
221 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
222 return is_builtin_name (name);
223}
224
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;  /* Labels provide no alignment information.  */
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  /* A second index makes the offset entirely unknown.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  /* Only the constant coefficient of the (possibly polynomial) bitpos
     is reported; the runtime part was folded into ALIGN above.  */
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
382
3482bf13 383/* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in alignp and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
387
388bool
389get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391{
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393}
394
957d0361 395/* Return the alignment in bits of EXP, an object. */
0c883ef3 396
397unsigned int
957d0361 398get_object_alignment (tree exp)
0c883ef3 399{
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
ceea063b 403 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 404
98ab9e8f 405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
ac29ece2 409 align = least_bit_hwi (bitpos);
957d0361 410 return align;
698537d1 411}
412
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: derive alignment from the object itself.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* ptr + offset: recurse on the base, then fold the offset's known
	 low bits into the misalignment.  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* Variable offset: its trailing zero bits bound the alignment.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Use alignment recorded in the SSA name's points-to info, if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address: its low bits are known exactly.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: conservative answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
487
69fbc3aa 488/* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495unsigned int
496get_pointer_alignment (tree exp)
497{
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
ceea063b 500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
ac29ece2 507 align = least_bit_hwi (bitpos);
69fbc3aa 508
509 return align;
510}
511
c4183f31 512/* Return the number of leading non-zero elements in the sequence
c62d63d4 513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515
c4183f31 516unsigned
c62d63d4 517string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518{
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543}
544
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  /* Warn at most once per argument.  */
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      /* Suppress any further warnings for this argument.  */
      TREE_NO_WARNING (arg) = 1;
    }
}
565
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		/* Offset of unknown shape: size is a lower bound only.  */
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
612
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      /* A conditional has a known length only if both arms agree.  */
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
800
e913b5cd 801/* Return a constant integer corresponding to target reading
78a0882d 802   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
 803   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
 804   are assumed to be zero, otherwise it reads as many characters
 805   as needed.  */
 806
 807rtx
 808c_readstr (const char *str, scalar_int_mode mode,
 809	   bool null_terminated_p/*=true*/)
6840589f 810{
6840589f 811  HOST_WIDE_INT ch;
 812  unsigned int i, j;
  /* Accumulator for the value, one host wide int per target word.  */
e913b5cd 813  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 814
 815  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of host wide ints needed to hold MODE's precision, rounded up.  */
e913b5cd 816  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
 817    / HOST_BITS_PER_WIDE_INT;
 818
a12aa4cc 819  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 820  for (i = 0; i < len; i++)
 821    tmp[i] = 0;
6840589f 822
  /* CH doubles as a "still before the terminating NUL" flag: it starts
     nonzero and, once a '\0' is read (and NULL_TERMINATED_P), stays zero
     so all remaining bytes are treated as zero.  */
6840589f 823  ch = 1;
 824  for (i = 0; i < GET_MODE_SIZE (mode); i++)
 825    {
      /* J is the target byte position of STR[i], corrected for word
	 endianness and, when words and bytes differ in endianness,
	 for byte order within a word.  */
 826      j = i;
 827      if (WORDS_BIG_ENDIAN)
 828	j = GET_MODE_SIZE (mode) - i - 1;
 829      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 830	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 831	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
 832      j *= BITS_PER_UNIT;
7d3f6cc7 833
78a0882d 834      if (ch || !null_terminated_p)
6840589f 835	ch = (unsigned char) str[i];
e913b5cd 836      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 837    }
ddb1be65 838
ab2c1de8 839  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 840  return immed_wide_int_const (c, mode);
6840589f 841}
842
ecc318ff 843/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 844   host char type, return zero and put that value into variable pointed to by
ecc318ff 845   P.  Return nonzero on failure (non-constant CST, or a value that does
 846   not survive the round trip through the target/host char types).  */
 847
 848static int
aecda0d6 849target_char_cast (tree cst, char *p)
ecc318ff 850{
 851  unsigned HOST_WIDE_INT val, hostval;
 852
c19686c5 853  if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 854      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
 855    return 1;
 856
e913b5cd 857  /* Do not care if it fits or not right here.  */
f9ae6f95 858  val = TREE_INT_CST_LOW (cst);
e913b5cd 859
  /* Truncate to the width of the target char.  */
ecc318ff 860  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 861    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 862
  /* And truncate again to the width of the host char; if the two
     truncations disagree the value cannot be represented.  */
 863  hostval = val;
 864  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 865    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 866
 867  if (val != hostval)
 868    return 1;
 869
 870  *p = hostval;
 871  return 0;
 872}
872
4ee9c684 873/* Similar to save_expr, but assumes that arbitrary code is not executed
 874   in between the multiple evaluations.  In particular, we assume that a
 875   non-addressable local variable will not be modified.  */
 876
 877static tree
 878builtin_save_expr (tree exp)
 879{
  /* SSA names and non-addressable locals/parms cannot change between
     evaluations under the above assumption, so no SAVE_EXPR is needed.  */
f6c35aa4 880  if (TREE_CODE (exp) == SSA_NAME
 881      || (TREE_ADDRESSABLE (exp) == 0
 882	  && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 883	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 884    return exp;
 885
 886  return save_expr (exp);
 887}
888
53800dbe 889/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
 890   times to get the address of either a higher stack frame, or a return
 891   address located within it (depending on FNDECL_CODE).  */
902de8ed 892
c626df3d 893static rtx
869d0ef0 894expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 895{
 896  int i;
869d0ef0 897  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
3f840859 898  if (tem == NULL_RTX)
e3e15c50 899    {
3f840859 900      /* For a zero count with __builtin_return_address, we don't care what
 901	 frame address we return, because target-specific definitions will
 902	 override us.  Therefore frame pointer elimination is OK, and using
 903	 the soft frame pointer is OK.
 904
 905	 For a nonzero count, or a zero count with __builtin_frame_address,
 906	 we require a stable offset from the current frame pointer to the
 907	 previous one, so we must use the hard frame pointer, and
 908	 we must disable frame pointer elimination.  */
 909      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
 910	tem = frame_pointer_rtx;
 911      else
 912	{
 913	  tem = hard_frame_pointer_rtx;
e3e15c50 914
3f840859 915	  /* Tell reload not to eliminate the frame pointer.  */
 916	  crtl->accesses_prior_frames = 1;
 917	}
e3e15c50 918    }
869d0ef0 919
53800dbe 920  if (count > 0)
 921    SETUP_FRAME_ADDRESSES ();
53800dbe 922
3a69c60c 923  /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 924     register.  There is no way to access it off of the current frame
 925     pointer, but it can be accessed off the previous frame pointer by
 926     reading the value from the register window save area.  */
  /* Hence one fewer frame walk is needed for __builtin_return_address.  */
a26d6c60 927  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 928    count--;
53800dbe 929
 930  /* Scan back COUNT frames to the specified frame.  */
 931  for (i = 0; i < count; i++)
 932    {
 933      /* Assume the dynamic chain pointer is in the word that the
 934	 frame address points to, unless otherwise specified.  */
53800dbe 935      tem = DYNAMIC_CHAIN_ADDRESS (tem);
53800dbe 936      tem = memory_address (Pmode, tem);
00060fc2 937      tem = gen_frame_mem (Pmode, tem);
83fc1478 938      tem = copy_to_reg (tem);
53800dbe 939    }
 940
3a69c60c 941  /* For __builtin_frame_address, return what we've got.  But, on
 942     the SPARC for example, we may have to add a bias.  */
53800dbe 943  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 944    return FRAME_ADDR_RTX (tem);
53800dbe 945
3a69c60c 946  /* For __builtin_return_address, get the return address from that frame.  */
53800dbe 947#ifdef RETURN_ADDR_RTX
 948  tem = RETURN_ADDR_RTX (count, tem);
 949#else
  /* Default: the return address is stored one word past the frame address.  */
 950  tem = memory_address (Pmode,
29c05e22 951			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 952  tem = gen_frame_mem (Pmode, tem);
53800dbe 953#endif
 954  return tem;
 955}
956
f7c44134 957/* Alias set used for setjmp buffer.  -1 means "not yet allocated";
   it is created lazily by the first setjmp/longjmp expander that needs it.  */
32c2fdea 958static alias_set_type setjmp_alias_set = -1;
f7c44134 959
6b7f6858 960/* Construct the leading half of a __builtin_setjmp call.  Control will
2c8a1497 961   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
 962   exception handling code.
 963
   Buffer layout: word 0 holds the (hard) frame pointer, word 1 holds
   the address of RECEIVER_LABEL, and the remainder is the
   machine-dependent stack save area.  */
53800dbe 963
6b7f6858 964void
aecda0d6 965expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 966{
3754d046 967  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 968  rtx stack_save;
f7c44134 969  rtx mem;
53800dbe 970
f7c44134 971  if (setjmp_alias_set == -1)
 972    setjmp_alias_set = new_alias_set ();
 973
85d654dd 974  buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 975
37ae8504 976  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 977
6b7f6858 978  /* We store the frame pointer and the address of receiver_label in
 979     the buffer and use the rest of it for the stack save area, which
 980     is machine-dependent.  */
53800dbe 981
f7c44134 982  mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 983  set_mem_alias_set (mem, setjmp_alias_set);
1cc5239c 984  emit_move_insn (mem, hard_frame_pointer_rtx);
f7c44134 985
  /* NOTE(review): the trailing comma below is the comma operator, not a
     typo with effect — the statement behaves exactly as if it ended with
     a semicolon.  */
29c05e22 986  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 987					   GET_MODE_SIZE (Pmode))),
ab6ab77e 988  set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 989
 990  emit_move_insn (validize_mem (mem),
6b7f6858 991		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 992
 993  stack_save = gen_rtx_MEM (sa_mode,
29c05e22 994			    plus_constant (Pmode, buf_addr,
53800dbe 995					   2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 996  set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 997  emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 998
 999  /* If there is further processing to do, do it.  */
a3c81e61 1000  if (targetm.have_builtin_setjmp_setup ())
 1001    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 1002
29f09705 1003  /* We have a nonlocal label.   */
18d50ae6 1004  cfun->has_nonlocal_label = 1;
6b7f6858 1005}
53800dbe 1006
2c8a1497 1007/* Construct the trailing part of a __builtin_setjmp call.  This is
4598ade9 1008   also called directly by the SJLJ exception handling code.
 1009   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */
6b7f6858 1010
 1011void
a3c81e61 1012expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 1013{
82c7907c 1014  rtx chain;
 1015
4598ade9 1016  /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 1017     marked as used by this function.  */
18b42941 1018  emit_use (hard_frame_pointer_rtx);
53800dbe 1019
 1020  /* Mark the static chain as clobbered here so life information
 1021     doesn't get messed up for it.  */
3c56e0c1 1022  chain = rtx_for_static_chain (current_function_decl, true);
82c7907c 1023  if (chain && REG_P (chain))
 1024    emit_clobber (chain);
53800dbe 1025
a494b6d7 1026  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
53800dbe 1027    {
4598ade9 1028      /* If the argument pointer can be eliminated in favor of the
 1029	 frame pointer, we don't need to restore it.  We assume here
 1030	 that if such an elimination is present, it can always be used.
 1031	 This is the case on all known machines; if we don't make this
 1032	 assumption, we do unnecessary saving on many machines.  */
53800dbe 1033      size_t i;
e99c3a1d 1034      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 1035
3098b2d3 1036      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 1037	if (elim_regs[i].from == ARG_POINTER_REGNUM
 1038	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
 1039	  break;
 1040
3098b2d3 1041      if (i == ARRAY_SIZE (elim_regs))
53800dbe 1042	{
 1043	  /* Now restore our arg pointer from the address at which it
05927e40 1044	     was saved in our stack frame.  */
27a7a23a 1045	  emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 1046			  copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 1047	}
 1048    }
53800dbe 1049
  /* Let the target emit its own receiver code if it has any; otherwise
     nothing further is needed here.  */
a3c81e61 1050  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
 1051    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
 1052  else if (targetm.have_nonlocal_goto_receiver ())
 1053    emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 1054  else
a3c81e61 1055    { /* Nothing */ }
57f6bb94 1056
3072d30e 1057  /* We must not allow the code we just generated to be reordered by
 1058     scheduling.  Specifically, the update of the frame pointer must
62dcb5c8 1059     happen immediately, not later.  */
3072d30e 1060  emit_insn (gen_blockage ());
6b7f6858 1061}
53800dbe 1062
53800dbe 1063/* __builtin_longjmp is passed a pointer to an array of five words (not
 1064   all will be used on all machines).  It operates similarly to the C
 1065   library function of the same name, but is more efficient.  Much of
2c8a1497 1066   the code below is copied from the handling of non-local gotos.  */
53800dbe 1067
c626df3d 1068static void
aecda0d6 1069expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 1070{
1e0c0b35 1071  rtx fp, lab, stack;
 1072  rtx_insn *insn, *last;
3754d046 1073  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 1074
48e1416a 1075  /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 1076     function  */
 1077  if (SUPPORTS_STACK_ALIGNMENT)
 1078    crtl->need_drap = true;
 1079
f7c44134 1080  if (setjmp_alias_set == -1)
 1081    setjmp_alias_set = new_alias_set ();
 1082
85d654dd 1083  buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 1084
53800dbe 1085  buf_addr = force_reg (Pmode, buf_addr);
 1086
82c7907c 1087  /* We require that the user must pass a second argument of 1, because
 1088     that is what builtin_setjmp will return.  */
64db345d 1089  gcc_assert (value == const1_rtx);
53800dbe 1090
4712c7d6 1091  last = get_last_insn ();
a3c81e61 1092  if (targetm.have_builtin_longjmp ())
 1093    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1094  else
53800dbe 1095    {
      /* Buffer layout mirrors expand_builtin_setjmp_setup: word 0 is the
	 saved frame pointer, word 1 the receiver label, then the stack
	 save area.  */
 1096      fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1097      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1098					       GET_MODE_SIZE (Pmode)));
 1099
29c05e22 1100      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1101						   2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1102      set_mem_alias_set (fp, setjmp_alias_set);
 1103      set_mem_alias_set (lab, setjmp_alias_set);
 1104      set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1105
 1106      /* Pick up FP, label, and SP from the block and jump.  This code is
 1107	 from expand_goto in stmt.c; see there for detailed comments.  */
a3c81e61 1108      if (targetm.have_nonlocal_goto ())
53800dbe 1109	/* We have to pass a value to the nonlocal_goto pattern that will
 1110	   get copied into the static_chain pointer, but it does not matter
 1111	   what that value is, because builtin_setjmp does not use it.  */
a3c81e61 1112	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1113      else
53800dbe 1114	{
18b42941 1115	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
 1116	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1117
5b5aa173 1118	  lab = copy_to_reg (lab);
 1119
5f337044 1120	  /* Restore the frame pointer and stack pointer.  We must use a
 1121	     temporary since the setjmp buffer may be a local.  */
 1122	  fp = copy_to_reg (fp);
e9c97615 1123	  emit_stack_restore (SAVE_NONLOCAL, stack);
5b5aa173 1124
 1125	  /* Ensure the frame pointer move is not optimized.  */
 1126	  emit_insn (gen_blockage ());
 1127	  emit_clobber (hard_frame_pointer_rtx);
 1128	  emit_clobber (frame_pointer_rtx);
5f337044 1129	  emit_move_insn (hard_frame_pointer_rtx, fp);
53800dbe 1130
18b42941 1131	  emit_use (hard_frame_pointer_rtx);
 1132	  emit_use (stack_pointer_rtx);
53800dbe 1133	  emit_indirect_jump (lab);
 1134	}
 1135    }
615166bb 1136
 1137  /* Search backwards and mark the jump insn as a non-local goto.
 1138     Note that this precludes the use of __builtin_longjmp to a
 1139     __builtin_setjmp target in the same function.  However, we've
 1140     already cautioned the user that these functions are for
 1141     internal exception handling use only.  */
449c0509 1142  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
 1143    {
      /* The loop must find a jump or call among the insns emitted above;
	 reaching LAST would mean we scanned past them.  */
64db345d 1144      gcc_assert (insn != last);
7d3f6cc7 1145
6d7dc5b9 1146      if (JUMP_P (insn))
449c0509 1147	{
a1ddb869 1148	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1149	  break;
 1150	}
6d7dc5b9 1151      else if (CALL_P (insn))
9342ee68 1152	break;
449c0509 1153    }
53800dbe 1154}
1155
/* Return true if ITER has not yet consumed all of the call arguments
   it is iterating over.  */

0e80b01d 1156static inline bool
 1157more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
 1158{
 1159  return (iter->i < iter->n);
 1160}
1161
 1162/* This function validates the types of a function call argument list
 1163   against a specified list of tree_codes.  If the last specifier is a 0,
5cfa3fc8 1164   that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1165   VOID_TYPE.
   Returns true if the argument list matches, false otherwise.  Pointer
   arguments covered by attribute nonnull must additionally be non-zero.  */
 1166
 1167static bool
 1168validate_arglist (const_tree callexpr, ...)
 1169{
 1170  enum tree_code code;
 1171  bool res = 0;
 1172  va_list ap;
 1173  const_call_expr_arg_iterator iter;
 1174  const_tree arg;
 1175
 1176  va_start (ap, callexpr);
 1177  init_const_call_expr_arg_iterator (callexpr, &iter);
 1178
5cfa3fc8 1179  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
184fac50 1180  tree fn = CALL_EXPR_FN (callexpr);
 1181  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1182
 1183  for (unsigned argno = 1; ; ++argno)
0e80b01d 1184    {
 1185      code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1186
0e80b01d 1187      switch (code)
 1188	{
 1189	case 0:
 1190	  /* This signifies an ellipses, any further arguments are all ok.  */
 1191	  res = true;
 1192	  goto end;
 1193	case VOID_TYPE:
 1194	  /* This signifies an endlink, if no arguments remain, return
 1195	     true, otherwise return false.  */
 1196	  res = !more_const_call_expr_args_p (&iter);
 1197	  goto end;
5cfa3fc8 1198	case POINTER_TYPE:
 1199	  /* The actual argument must be nonnull when either the whole
 1200	     called function has been declared nonnull, or when the formal
 1201	     argument corresponding to the actual argument has been.  */
	  /* An empty ARGMAP means attribute nonnull with no arguments,
	     i.e. every pointer argument must be nonnull.  */
184fac50 1202	  if (argmap
 1203	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
 1204	    {
 1205	      arg = next_const_call_expr_arg (&iter);
 1206	      if (!validate_arg (arg, code) || integer_zerop (arg))
 1207		goto end;
 1208	      break;
 1209	    }
5cfa3fc8 1210	  /* FALLTHRU */
0e80b01d 1211	default:
 1212	  /* If no parameters remain or the parameter's code does not
 1213	     match the specified code, return false.  Otherwise continue
 1214	     checking any remaining arguments.  */
 1215	  arg = next_const_call_expr_arg (&iter);
184fac50 1216	  if (!validate_arg (arg, code))
0e80b01d 1217	    goto end;
 1218	  break;
 1219	}
 1220    }
0e80b01d 1221
 1222  /* We need gotos here since we can only have one VA_CLOSE in a
 1223     function.  */
 1224 end: ;
 1225  va_end (ap);
 1226
5cfa3fc8 1227  BITMAP_FREE (argmap);
 1228
0e80b01d 1229  return res;
 1230}
1231
4ee9c684 1232/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
 1233   and the address of the save area.
   Returns const0_rtx on success, NULL_RTX if the argument list does not
   validate (caller then falls back to a normal call expansion).  */
 1234
 1235static rtx
c2f47e15 1236expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1237{
 1238  tree t_label, t_save_area;
1e0c0b35 1239  rtx r_label, r_save_area, r_fp, r_sp;
 1240  rtx_insn *insn;
4ee9c684 1241
c2f47e15 1242  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1243    return NULL_RTX;
 1244
c2f47e15 1245  t_label = CALL_EXPR_ARG (exp, 0);
 1246  t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1247
8ec3c5c2 1248  r_label = expand_normal (t_label);
3dce56cc 1249  r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1250  r_save_area = expand_normal (t_save_area);
3dce56cc 1251  r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1252  /* Copy the address of the save location to a register just in case it was
 1253     based on the frame pointer.  */
51adbc8a 1254  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 is the frame pointer, followed by the
     stack pointer in the nonlocal save mode.  */
4ee9c684 1255  r_fp = gen_rtx_MEM (Pmode, r_save_area);
 1256  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1257		      plus_constant (Pmode, r_save_area,
 1258				     GET_MODE_SIZE (Pmode)));
4ee9c684 1259
18d50ae6 1260  crtl->has_nonlocal_goto = 1;
4ee9c684 1261
4ee9c684 1262  /* ??? We no longer need to pass the static chain value, afaik.  */
a3c81e61 1263  if (targetm.have_nonlocal_goto ())
 1264    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1265  else
4ee9c684 1266    {
18b42941 1267      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
 1268      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1269
5b5aa173 1270      r_label = copy_to_reg (r_label);
 1271
5f337044 1272      /* Restore the frame pointer and stack pointer.  We must use a
 1273	 temporary since the setjmp buffer may be a local.  */
 1274      r_fp = copy_to_reg (r_fp);
e9c97615 1275      emit_stack_restore (SAVE_NONLOCAL, r_sp);
5b5aa173 1276
 1277      /* Ensure the frame pointer move is not optimized.  */
 1278      emit_insn (gen_blockage ());
 1279      emit_clobber (hard_frame_pointer_rtx);
 1280      emit_clobber (frame_pointer_rtx);
5f337044 1281      emit_move_insn (hard_frame_pointer_rtx, r_fp);
491e04ef 1282
4ee9c684 1283      /* USE of hard_frame_pointer_rtx added for consistency;
 1284	 not clear if really needed.  */
18b42941 1285      emit_use (hard_frame_pointer_rtx);
 1286      emit_use (stack_pointer_rtx);
ad0d0af8 1287
 1288      /* If the architecture is using a GP register, we must
 1289	 conservatively assume that the target function makes use of it.
 1290	 The prologue of functions with nonlocal gotos must therefore
 1291	 initialize the GP register to the appropriate value, and we
 1292	 must then make sure that this value is live at the point
 1293	 of the jump.  (Note that this doesn't necessarily apply
 1294	 to targets with a nonlocal_goto pattern; they are free
 1295	 to implement it in their own way.  Note also that this is
 1296	 a no-op if the GP register is a global invariant.)  */
1e826931 1297      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
 1298      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
18b42941 1299	emit_use (pic_offset_table_rtx);
ad0d0af8 1300
4ee9c684 1301      emit_indirect_jump (r_label);
 1302    }
491e04ef 1303
4ee9c684 1304  /* Search backwards to the jump insn and mark it as a
 1305     non-local goto.  */
 1306  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
 1307    {
6d7dc5b9 1308      if (JUMP_P (insn))
4ee9c684 1309	{
a1ddb869 1310	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1311	  break;
 1312	}
6d7dc5b9 1313      else if (CALL_P (insn))
4ee9c684 1314	break;
 1315    }
 1316
 1317  return const0_rtx;
 1318}
1319
843d08a9 1320/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
 1321   (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1322   It updates the stack pointer in that block to the current value.  This is
 1323   also called directly by the SJLJ exception handling code.  */
843d08a9 1324
97354ae4 1325void
843d08a9 1326expand_builtin_update_setjmp_buf (rtx buf_addr)
 1327{
3754d046 1328  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
77e843a0 1329  buf_addr = convert_memory_address (Pmode, buf_addr);
  /* The stack save area lives two pointer-sized words into the buffer,
     matching the layout written by expand_builtin_setjmp_setup.  */
d1ff492e 1330  rtx stack_save
843d08a9 1331    = gen_rtx_MEM (sa_mode,
 1332		   memory_address
 1333		   (sa_mode,
29c05e22 1334		    plus_constant (Pmode, buf_addr,
 1335				   2 * GET_MODE_SIZE (Pmode))));
843d08a9 1336
e9c97615 1337  emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1338}
1339
5e3608d8 1340/* Expand a call to __builtin_prefetch.  For a target that does not support
 1341   data prefetch, evaluate the memory address argument in case it has side
 1342   effects.  */
 1343
 1344static void
c2f47e15 1345expand_builtin_prefetch (tree exp)
5e3608d8 1346{
 1347  tree arg0, arg1, arg2;
c2f47e15 1348  int nargs;
5e3608d8 1349  rtx op0, op1, op2;
 1350
c2f47e15 1351  if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1352    return;
 1353
c2f47e15 1354  arg0 = CALL_EXPR_ARG (exp, 0);
 1355
26a5cadb 1356  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
 1357     zero (read) and argument 2 (locality) defaults to 3 (high degree of
 1358     locality).  */
c2f47e15 1359  nargs = call_expr_nargs (exp);
 1360  if (nargs > 1)
 1361    arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1362  else
c2f47e15 1363    arg1 = integer_zero_node;
 1364  if (nargs > 2)
 1365    arg2 = CALL_EXPR_ARG (exp, 2);
 1366  else
2512209b 1367    arg2 = integer_three_node;
5e3608d8 1368
 1369  /* Argument 0 is an address.  */
 1370  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
 1371
 1372  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  /* Invalid flag arguments are diagnosed and then replaced with a safe
     default rather than aborting the expansion.  */
 1373  if (TREE_CODE (arg1) != INTEGER_CST)
 1374    {
07e3a3d2 1375      error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1376      arg1 = integer_zero_node;
5e3608d8 1377    }
8ec3c5c2 1378  op1 = expand_normal (arg1);
5e3608d8 1379  /* Argument 1 must be either zero or one.  */
 1380  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
 1381    {
c3ceba8e 1382      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1383	       " using zero");
5e3608d8 1384      op1 = const0_rtx;
 1385    }
 1386
 1387  /* Argument 2 (locality) must be a compile-time constant int.  */
 1388  if (TREE_CODE (arg2) != INTEGER_CST)
 1389    {
07e3a3d2 1390      error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1391      arg2 = integer_zero_node;
 1392    }
8ec3c5c2 1393  op2 = expand_normal (arg2);
5e3608d8 1394  /* Argument 2 must be 0, 1, 2, or 3.  */
 1395  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
 1396    {
c3ceba8e 1397      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1398      op2 = const0_rtx;
 1399    }
 1400
1d375a79 1401  if (targetm.have_prefetch ())
5e3608d8 1402    {
2e966e2a 1403      class expand_operand ops[3];
8786db1e 1404
 1405      create_address_operand (&ops[0], op0);
 1406      create_integer_operand (&ops[1], INTVAL (op1));
 1407      create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1408      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1409	return;
5e3608d8 1410    }
0a534ba7 1411
f0ce3b1f 1412  /* Don't do anything with direct references to volatile memory, but
 1413     generate code to handle other side effects.  */
e16ceb8e 1414  if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1415    emit_insn (op0);
5e3608d8 1416}
1417
f7c44134 1418/* Get a MEM rtx for expression EXP which is the address of an operand
f135a8d4 1419   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
d8ae1baa 1420   the maximum length of the block of memory that might be accessed or
 1421   NULL if unknown.  */
f7c44134 1422
53800dbe 1423static rtx
d8ae1baa 1424get_memory_rtx (tree exp, tree len)
53800dbe 1425{
ad0a178f 1426  tree orig_exp = exp;
 1427  rtx addr, mem;
ad0a178f 1428
 1429  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
 1430     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
 1431  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
 1432    exp = TREE_OPERAND (exp, 0);
 1433
  /* Note: the address is expanded from ORIG_EXP (with the SAVE_EXPR
     intact); EXP is only used below to derive memory attributes.  */
 1434  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
 1435  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1436
f7c44134 1437  /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1438     First remove any nops.  */
72dd6141 1439  while (CONVERT_EXPR_P (exp)
f7c44134 1440	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
 1441    exp = TREE_OPERAND (exp, 0);
 1442
5dd3f78f 1443  /* Build a MEM_REF representing the whole accessed area as a byte blob,
 1444     (as builtin stringops may alias with anything).  */
 1445  exp = fold_build2 (MEM_REF,
 1446		     build_array_type (char_type_node,
 1447				       build_range_type (sizetype,
 1448							 size_one_node, len)),
 1449		     exp, build_int_cst (ptr_type_node, 0));
 1450
 1451  /* If the MEM_REF has no acceptable address, try to get the base object
 1452     from the original address we got, and build an all-aliasing
 1453     unknown-sized access to that one.  */
 1454  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
 1455    set_mem_attributes (mem, exp, 0);
 1456  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
 1457	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
 1458						     0))))
eec8e941 1459    {
5dd3f78f 1460      exp = build_fold_addr_expr (exp);
 1461      exp = fold_build2 (MEM_REF,
 1462			 build_array_type (char_type_node,
 1463					   build_range_type (sizetype,
 1464							     size_zero_node,
 1465							     NULL)),
 1466			 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1467      set_mem_attributes (mem, exp, 0);
eec8e941 1468    }
  /* Alias set 0 makes the MEM conflict with everything, which is safe
     for string builtins that may touch arbitrary memory.  */
5dd3f78f 1469  set_mem_alias_set (mem, 0);
53800dbe 1470  return mem;
 1471}
1472\f
 1473/* Built-in functions to perform an untyped call and return.  */

/* Per-target cached mode tables for __builtin_apply_args and
   __builtin_apply; the actual storage lives in this_target_builtins.  */
3b9c3a16 1475#define apply_args_mode \
 1476  (this_target_builtins->x_apply_args_mode)
 1477#define apply_result_mode \
 1478  (this_target_builtins->x_apply_result_mode)
53800dbe 1479
53800dbe 1480/* Return the size required for the block returned by __builtin_apply_args,
 1481   and initialize apply_args_mode.  */
 1482
 1483static int
aecda0d6 1484apply_args_size (void)
53800dbe 1485{
  /* Computed once and cached; -1 marks "not yet computed".  */
 1486  static int size = -1;
58e9ce8f 1487  int align;
 1488  unsigned int regno;
53800dbe 1489
 1490  /* The values computed by this function never change.  */
 1491  if (size < 0)
 1492    {
 1493      /* The first value is the incoming arg-pointer.  */
 1494      size = GET_MODE_SIZE (Pmode);
 1495
 1496      /* The second value is the structure value address unless this is
 1497	 passed as an "invisible" first argument.  */
6812c89e 1498      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1499	size += GET_MODE_SIZE (Pmode);
 1500
 1501      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1502	if (FUNCTION_ARG_REGNO_P (regno))
 1503	  {
d8ba6ec1 1504	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1505
64db345d 1506	    gcc_assert (mode != VOIDmode);
53800dbe 1507
	    /* Round SIZE up to MODE's alignment before adding its slot.  */
 1508	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1509	    if (size % align != 0)
 1510	      size = CEIL (size, align) * align;
53800dbe 1511	    size += GET_MODE_SIZE (mode);
 1512	    apply_args_mode[regno] = mode;
 1513	  }
 1514	else
 1515	  {
d8ba6ec1 1516	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
53800dbe 1517	  }
 1518    }
 1519  return size;
 1520}
1521
 1522/* Return the size required for the block returned by __builtin_apply,
 1523   and initialize apply_result_mode.  */
 1524
 1525static int
aecda0d6 1526apply_result_size (void)
53800dbe 1527{
  /* Computed once and cached; -1 marks "not yet computed".  */
 1528  static int size = -1;
 1529  int align, regno;
53800dbe 1530
 1531  /* The values computed by this function never change.  */
 1532  if (size < 0)
 1533    {
 1534      size = 0;
 1535
 1536      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1537	if (targetm.calls.function_value_regno_p (regno))
53800dbe 1538	  {
d8ba6ec1 1539	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1540
64db345d 1541	    gcc_assert (mode != VOIDmode);
53800dbe 1542
	    /* Round SIZE up to MODE's alignment before adding its slot.  */
 1543	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1544	    if (size % align != 0)
 1545	      size = CEIL (size, align) * align;
 1546	    size += GET_MODE_SIZE (mode);
 1547	    apply_result_mode[regno] = mode;
 1548	  }
 1549	else
d8ba6ec1 1550	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
53800dbe 1551
 1552      /* Allow targets that use untyped_call and untyped_return to override
 1553	 the size so that machine-specific information can be stored here.  */
 1554#ifdef APPLY_RESULT_SIZE
 1555      size = APPLY_RESULT_SIZE;
 1556#endif
 1557    }
 1558  return size;
 1559}
1560
53800dbe 1561/* Create a vector describing the result block RESULT.  If SAVEP is true,
 1562   the result block is used to save the values; otherwise it is used to
 1563   restore the values.
   Returns a PARALLEL of SETs, one per hard register with a valid entry
   in apply_result_mode, in the same layout used by apply_result_size.  */
 1564
 1565static rtx
aecda0d6 1566result_vector (int savep, rtx result)
53800dbe 1567{
 1568  int regno, size, align, nelts;
d8ba6ec1 1569  fixed_size_mode mode;
53800dbe 1570  rtx reg, mem;
364c0c59 1571  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1572
53800dbe 1573  size = nelts = 0;
 1574  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1575    if ((mode = apply_result_mode[regno]) != VOIDmode)
 1576      {
	/* Keep SIZE aligned exactly as apply_result_size did, so the
	   offsets into RESULT agree with the block layout.  */
 1577	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1578	if (size % align != 0)
 1579	  size = CEIL (size, align) * align;
 1580	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1581	mem = adjust_address (result, mode, size);
53800dbe 1582	savevec[nelts++] = (savep
d1f9b275 1583			    ? gen_rtx_SET (mem, reg)
 1584			    : gen_rtx_SET (reg, mem));
53800dbe 1585	size += GET_MODE_SIZE (mode);
 1586      }
 1587  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
 1588}
53800dbe 1589
 1590/* Save the state required to perform an untyped call with the same
 1591   arguments as were passed to the current function.
   Returns (in a pseudo) the address of a stack block holding, in order:
   the incoming arg pointer, the structure value address (if not passed
   invisibly), then each argument register in apply_args_mode.  */
 1592
 1593static rtx
aecda0d6 1594expand_builtin_apply_args_1 (void)
53800dbe 1595{
1c7e61a7 1596  rtx registers, tem;
53800dbe 1597  int size, align, regno;
d8ba6ec1 1598  fixed_size_mode mode;
6812c89e 1599  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1600
 1601  /* Create a block where the arg-pointer, structure value address,
 1602     and argument registers can be saved.  */
 1603  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
 1604
 1605  /* Walk past the arg-pointer and structure value address.  */
 1606  size = GET_MODE_SIZE (Pmode);
6812c89e 1607  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1608    size += GET_MODE_SIZE (Pmode);
 1609
 1610  /* Save each register used in calling a function to the block.  */
 1611  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1612    if ((mode = apply_args_mode[regno]) != VOIDmode)
 1613      {
	/* Alignment must track apply_args_size so offsets line up.  */
53800dbe 1614	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1615	if (size % align != 0)
 1616	  size = CEIL (size, align) * align;
 1617
 1618	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
 1619
e513d163 1620	emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1621	size += GET_MODE_SIZE (mode);
 1622      }
 1623
 1624  /* Save the arg pointer to the block.  */
27a7a23a 1625  tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1626  /* We need the pointer as the caller actually passed them to us, not
9d4b544c 1627     as we might have pretended they were passed.  Make sure it's a valid
 1628     operand, as emit_move_insn isn't expected to handle a PLUS.  */
3764c94e 1629  if (STACK_GROWS_DOWNWARD)
 1630    tem
 1631      = force_operand (plus_constant (Pmode, tem,
 1632				      crtl->args.pretend_args_size),
 1633		       NULL_RTX);
1c7e61a7 1634  emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1635
53800dbe 1636  size = GET_MODE_SIZE (Pmode);
 1637
 1638  /* Save the structure value address unless this is passed as an
 1639     "invisible" first argument.  */
45550790 1640  if (struct_incoming_value)
22eb1ed5 1641    emit_move_insn (adjust_address (registers, Pmode, size),
 1642		    copy_to_reg (struct_incoming_value));
53800dbe 1643
 1644  /* Return the address of the block.  */
 1645  return copy_addr_to_reg (XEXP (registers, 0));
 1646}
1647
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Expand the save into a detached sequence so we can move it.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1691
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address of the callee, ARGUMENTS is the address of a
   block laid out by __builtin_apply_args, and ARGSIZE is the number of
   bytes of stack arguments to copy.  Returns (in ptr_mode) the address
   of a block holding the callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.
     Offsets mirror the layout written by expand_builtin_apply_args_1.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1852
/* Perform an untyped return.  RESULT is the address (in ptr_mode) of a
   block previously filled by expand_builtin_apply; reload the return
   registers from it and jump to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* Prefer the target's dedicated untyped_return pattern when it
     exists; it restores all return registers in one insn.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns so the reloads aren't deleted as dead.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1901
539a3a92 1902/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1903
539a3a92 1904static enum type_class
aecda0d6 1905type_to_class (tree type)
539a3a92 1906{
1907 switch (TREE_CODE (type))
1908 {
1909 case VOID_TYPE: return void_type_class;
1910 case INTEGER_TYPE: return integer_type_class;
539a3a92 1911 case ENUMERAL_TYPE: return enumeral_type_class;
1912 case BOOLEAN_TYPE: return boolean_type_class;
1913 case POINTER_TYPE: return pointer_type_class;
1914 case REFERENCE_TYPE: return reference_type_class;
1915 case OFFSET_TYPE: return offset_type_class;
1916 case REAL_TYPE: return real_type_class;
1917 case COMPLEX_TYPE: return complex_type_class;
1918 case FUNCTION_TYPE: return function_type_class;
1919 case METHOD_TYPE: return method_type_class;
1920 case RECORD_TYPE: return record_type_class;
1921 case UNION_TYPE:
1922 case QUAL_UNION_TYPE: return union_type_class;
1923 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1924 ? string_type_class : array_type_class);
539a3a92 1925 case LANG_TYPE: return lang_type_class;
1926 default: return no_type_class;
1927 }
1928}
bf8e3599 1929
c2f47e15 1930/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1931
53800dbe 1932static rtx
c2f47e15 1933expand_builtin_classify_type (tree exp)
53800dbe 1934{
c2f47e15 1935 if (call_expr_nargs (exp))
1936 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1937 return GEN_INT (no_type_class);
1938}
1939
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  Each macro both matches the combined_fn cases and
   fills in the fcode* locals of mathfn_built_in_2.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but additionally fills in the _Float16, _Float<N>
   and _Float<N>X function codes.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1967
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  /* Filled in by the CASE_MATHFN* macros below; one slot per
     floating-point type suffix.  */
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the function code matching TYPE's main variant.  Functions
     matched by plain CASE_MATHFN leave the fcodef16..fcodef128x slots
     at END_BUILTINS, so _FloatN requests for them correctly fail.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
2104
2105/* Return mathematic function equivalent to FN but operating directly on TYPE,
2106 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2107 otherwise use the explicit declaration. If we can't do the conversion,
2108 return null. */
2109
2110static tree
e3240774 2111mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 2112{
2113 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2114 if (fcode2 == END_BUILTINS)
c2f47e15 2115 return NULL_TREE;
b9a16870 2116
2117 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2118 return NULL_TREE;
2119
2120 return builtin_decl_explicit (fcode2);
0a68165a 2121}
2122
e3240774 2123/* Like mathfn_built_in_1, but always use the implicit array. */
c319d56a 2124
2125tree
e3240774 2126mathfn_built_in (tree type, combined_fn fn)
c319d56a 2127{
2128 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2129}
2130
e3240774 2131/* Like mathfn_built_in_1, but take a built_in_function and
2132 always use the implicit array. */
2133
2134tree
2135mathfn_built_in (tree type, enum built_in_function fn)
2136{
2137 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2138}
2139
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* The bulk of the mapping is generated from internal-fn.def via
       the DEF_INTERNAL_*_FN macros below.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* A few builtins map onto internal functions with a different
       name; handle those explicitly.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is only ldexp when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2177
2178/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2179 on the current target by a call to an internal function, return the
2180 code of that internal function, otherwise return IFN_LAST. The caller
2181 is responsible for ensuring that any side-effects of the built-in
2182 call are dealt with correctly. E.g. if CALL sets errno, the caller
2183 must decide that the errno result isn't needed or make it available
2184 in some other way. */
2185
2186internal_fn
2187replacement_internal_fn (gcall *call)
2188{
2189 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2190 {
2191 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2192 if (ifn != IFN_LAST)
2193 {
2194 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2195 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2196 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2197 return ifn;
2198 }
2199 }
2200 return IFN_LAST;
2201}
2202
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so the arguments can be
     re-expanded by the library-call fallback without duplicating
     side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2277
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  /* sincos computes both values at once; request only the one
	     this builtin returns and discard the other.  */
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2378
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2422
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the unexpanded argument so it can be reinstated if
	 the insn expansion below fails.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: roll back any emitted insns and fall through
	 to the library call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2473
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the sin/cos results are stored through the two
   pointer arguments and const0_rtx is returned.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointers so the stores get the
     right aliasing information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2527
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.

   Three strategies are tried in order: the sincos optab, a libcall to
   sincos, and finally a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  The final COMPLEX_EXPR below takes
	 its real part from op2 and imaginary part from op1.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Select the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Create stack temporaries for the two outputs and trees for
	 their addresses to pass to the sincos call.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i); build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2636
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   The call is given source location LOC.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* The callee operand of a CALL_EXPR is the function's address.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
a65c4d64 2655
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the conversion optab and the float rounding function to fall
     back on, based on which builtin is being expanded.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Replace the call with floor/ceil of the saved argument ...  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* ... and truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2793
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab; the int-returning variants additionally record
     the long-returning builtin to fall back to.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Only expand inline when errno handling is not required, as the
     inline expansion cannot set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the int result mode.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2895
c2f47e15 2896/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2897 a normal call should be emitted rather than expanding the function
2898 in-line. EXP is the expression that is a call to the builtin
2899 function; if convenient, the result should be placed in TARGET. */
2900
2901static rtx
f97eea22 2902expand_builtin_powi (tree exp, rtx target)
757c219d 2903{
757c219d 2904 tree arg0, arg1;
2905 rtx op0, op1;
3754d046 2906 machine_mode mode;
2907 machine_mode mode2;
757c219d 2908
c2f47e15 2909 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2910 return NULL_RTX;
757c219d 2911
c2f47e15 2912 arg0 = CALL_EXPR_ARG (exp, 0);
2913 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2914 mode = TYPE_MODE (TREE_TYPE (exp));
2915
757c219d 2916 /* Emit a libcall to libgcc. */
2917
c2f47e15 2918 /* Mode of the 2nd argument must match that of an int. */
517be012 2919 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
d0405f40 2920
757c219d 2921 if (target == NULL_RTX)
2922 target = gen_reg_rtx (mode);
2923
f97eea22 2924 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2925 if (GET_MODE (op0) != mode)
2926 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2927 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2928 if (GET_MODE (op1) != mode2)
2929 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2930
f36b9f69 2931 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
9e9e5c15 2932 target, LCT_CONST, mode,
d0405f40 2933 op0, mode, op1, mode2);
757c219d 2934
2935 return target;
2936}
2937
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  class expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Walk wider
     modes starting from TARGET_MODE until one has a strlen pattern.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Insert the source-address computation ahead of the strlen insns
     emitted above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3043
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.
   Also diagnoses bounds that exceed the maximum object size or the
   size of an unterminated source array.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* Case 1: the bound is a constant.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater.  */
	  lendata.decl = unterminated_array (src, &len, &exact);
	  if (!lendata.decl)
	    return NULL_RTX;
	}

      if (lendata.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (lendata.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      /* strnlen (s, n) == min (strlen (s), n).  */
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  /* Case 2: a non-constant bound; only handle SSA names with a known
     value range.  */
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (lendata.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (lendata.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	}
    }

  /* Don't expand calls involving unterminated arrays.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length, the
     result is just the length.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3181
6840589f 3182/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3183 bytes from constant string DATA + OFFSET and return it as target
3184 constant. */
3185
3186static rtx
aecda0d6 3187builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 3188 scalar_int_mode mode)
6840589f 3189{
3190 const char *str = (const char *) data;
3191
64db345d 3192 gcc_assert (offset >= 0
3193 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3194 <= strlen (str) + 1));
6840589f 3195
3196 return c_readstr (str + offset, mode);
3197}
3198
/* LEN specifies the length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A constant length pins all three outputs to the same value.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA range information.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine a minimum size of N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3265
5aef8938 3266/* Try to verify that the sizes and lengths of the arguments to a string
3267 manipulation function given by EXP are within valid bounds and that
e6a18b5a 3268 the operation does not lead to buffer overflow or read past the end.
3269 Arguments other than EXP may be null. When non-null, the arguments
3270 have the following meaning:
3271 DST is the destination of a copy call or NULL otherwise.
3272 SRC is the source of a copy call or NULL otherwise.
3273 DSTWRITE is the number of bytes written into the destination obtained
3274 from the user-supplied size argument to the function (such as in
3275 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3276 MAXREAD is the user-supplied bound on the length of the source sequence
5aef8938 3277 (such as in strncat(d, s, N). It specifies the upper limit on the number
e6a18b5a 3278 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3279 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3280 expression EXP is a string function call (as opposed to a memory call
3281 like memcpy). As an exception, SRCSTR can also be an integer denoting
3282 the precomputed size of the source string or object (for functions like
3283 memcpy).
3284 DSTSIZE is the size of the destination object specified by the last
5aef8938 3285 argument to the _chk builtins, typically resulting from the expansion
e6a18b5a 3286 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3287 DSTSIZE).
5aef8938 3288
e6a18b5a 3289 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
5aef8938 3290 SIZE_MAX.
3291
e6a18b5a 3292 If the call is successfully verified as safe return true, otherwise
3293 return false. */
5aef8938 3294
3295static bool
e6a18b5a 3296check_access (tree exp, tree, tree, tree dstwrite,
3297 tree maxread, tree srcstr, tree dstsize)
5aef8938 3298{
e6a18b5a 3299 int opt = OPT_Wstringop_overflow_;
3300
5aef8938 3301 /* The size of the largest object is half the address space, or
e6a18b5a 3302 PTRDIFF_MAX. (This is way too permissive.) */
3303 tree maxobjsize = max_object_size ();
5aef8938 3304
e6a18b5a 3305 /* Either the length of the source string for string functions or
3306 the size of the source object for raw memory functions. */
5aef8938 3307 tree slen = NULL_TREE;
3308
8d6c6ef5 3309 tree range[2] = { NULL_TREE, NULL_TREE };
3310
5aef8938 3311 /* Set to true when the exact number of bytes written by a string
3312 function like strcpy is not known and the only thing that is
3313 known is that it must be at least one (for the terminating nul). */
3314 bool at_least_one = false;
e6a18b5a 3315 if (srcstr)
5aef8938 3316 {
e6a18b5a 3317 /* SRCSTR is normally a pointer to string but as a special case
5aef8938 3318 it can be an integer denoting the length of a string. */
e6a18b5a 3319 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
5aef8938 3320 {
3321 /* Try to determine the range of lengths the source string
8d6c6ef5 3322 refers to. If it can be determined and is less than
e6a18b5a 3323 the upper bound given by MAXREAD add one to it for
5aef8938 3324 the terminating nul. Otherwise, set it to one for
e6a18b5a 3325 the same reason, or to MAXREAD as appropriate. */
14c286b1 3326 c_strlen_data lendata = { };
3327 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3328 range[0] = lendata.minlen;
3329 range[1] = lendata.maxbound;
e6a18b5a 3330 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
8d6c6ef5 3331 {
e6a18b5a 3332 if (maxread && tree_int_cst_le (maxread, range[0]))
3333 range[0] = range[1] = maxread;
8d6c6ef5 3334 else
3335 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3336 range[0], size_one_node);
3337
e6a18b5a 3338 if (maxread && tree_int_cst_le (maxread, range[1]))
3339 range[1] = maxread;
8d6c6ef5 3340 else if (!integer_all_onesp (range[1]))
3341 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3342 range[1], size_one_node);
3343
3344 slen = range[0];
3345 }
5aef8938 3346 else
3347 {
3348 at_least_one = true;
3349 slen = size_one_node;
3350 }
3351 }
3352 else
e6a18b5a 3353 slen = srcstr;
5aef8938 3354 }
3355
e6a18b5a 3356 if (!dstwrite && !maxread)
5aef8938 3357 {
3358 /* When the only available piece of data is the object size
3359 there is nothing to do. */
3360 if (!slen)
3361 return true;
3362
3363 /* Otherwise, when the length of the source sequence is known
e6a18b5a 3364 (as with strlen), set DSTWRITE to it. */
8d6c6ef5 3365 if (!range[0])
e6a18b5a 3366 dstwrite = slen;
5aef8938 3367 }
3368
e6a18b5a 3369 if (!dstsize)
3370 dstsize = maxobjsize;
5aef8938 3371
e6a18b5a 3372 if (dstwrite)
3373 get_size_range (dstwrite, range);
5aef8938 3374
e6a18b5a 3375 tree func = get_callee_fndecl (exp);
5aef8938 3376
3377 /* First check the number of bytes to be written against the maximum
3378 object size. */
c4183f31 3379 if (range[0]
3380 && TREE_CODE (range[0]) == INTEGER_CST
3381 && tree_int_cst_lt (maxobjsize, range[0]))
5aef8938 3382 {
864bd5de 3383 if (TREE_NO_WARNING (exp))
3384 return false;
3385
5aef8938 3386 location_t loc = tree_nonartificial_location (exp);
4d317237 3387 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3388
864bd5de 3389 bool warned;
5aef8938 3390 if (range[0] == range[1])
864bd5de 3391 warned = warning_at (loc, opt,
3392 "%K%qD specified size %E "
3393 "exceeds maximum object size %E",
3394 exp, func, range[0], maxobjsize);
3395 else
3396 warned = warning_at (loc, opt,
3397 "%K%qD specified size between %E and %E "
3398 "exceeds maximum object size %E",
3399 exp, func,
3400 range[0], range[1], maxobjsize);
3401 if (warned)
3402 TREE_NO_WARNING (exp) = true;
3403
5aef8938 3404 return false;
3405 }
3406
e6a18b5a 3407 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3408 constant, and in range of unsigned HOST_WIDE_INT. */
3409 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3410
5aef8938 3411 /* Next check the number of bytes to be written against the destination
3412 object size. */
e6a18b5a 3413 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
5aef8938 3414 {
3415 if (range[0]
c4183f31 3416 && TREE_CODE (range[0]) == INTEGER_CST
e6a18b5a 3417 && ((tree_fits_uhwi_p (dstsize)
3418 && tree_int_cst_lt (dstsize, range[0]))
c4183f31 3419 || (dstwrite
3420 && tree_fits_uhwi_p (dstwrite)
e6a18b5a 3421 && tree_int_cst_lt (dstwrite, range[0]))))
5aef8938 3422 {
080a1363 3423 if (TREE_NO_WARNING (exp))
3424 return false;
3425
5aef8938 3426 location_t loc = tree_nonartificial_location (exp);
4d317237 3427 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3428
e6a18b5a 3429 if (dstwrite == slen && at_least_one)
8d6c6ef5 3430 {
3431 /* This is a call to strcpy with a destination of 0 size
3432 and a source of unknown length. The call will write
3433 at least one byte past the end of the destination. */
3434 warning_at (loc, opt,
9098b938 3435 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3436 "of size %E overflows the destination",
e6a18b5a 3437 exp, func, range[0], dstsize);
8d6c6ef5 3438 }
3439 else if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3440 warning_n (loc, opt, tree_to_uhwi (range[0]),
3441 "%K%qD writing %E byte into a region "
3442 "of size %E overflows the destination",
3443 "%K%qD writing %E bytes into a region "
3444 "of size %E overflows the destination",
3445 exp, func, range[0], dstsize);
8d6c6ef5 3446 else if (tree_int_cst_sign_bit (range[1]))
3447 {
3448 /* Avoid printing the upper bound if it's invalid. */
3449 warning_at (loc, opt,
9098b938 3450 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3451 "of size %E overflows the destination",
e6a18b5a 3452 exp, func, range[0], dstsize);
8d6c6ef5 3453 }
5aef8938 3454 else
3455 warning_at (loc, opt,
9098b938 3456 "%K%qD writing between %E and %E bytes into "
8d6c6ef5 3457 "a region of size %E overflows the destination",
e6a18b5a 3458 exp, func, range[0], range[1],
3459 dstsize);
5aef8938 3460
3461 /* Return error when an overflow has been detected. */
3462 return false;
3463 }
3464 }
3465
3466 /* Check the maximum length of the source sequence against the size
3467 of the destination object if known, or against the maximum size
3468 of an object. */
e6a18b5a 3469 if (maxread)
5aef8938 3470 {
e6a18b5a 3471 get_size_range (maxread, range);
3472
3473 /* Use the lower end for MAXREAD from now on. */
3474 if (range[0])
3475 maxread = range[0];
5aef8938 3476
e6a18b5a 3477 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
5aef8938 3478 {
3479 location_t loc = tree_nonartificial_location (exp);
4d317237 3480 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3481
3482 if (tree_int_cst_lt (maxobjsize, range[0]))
3483 {
080a1363 3484 if (TREE_NO_WARNING (exp))
3485 return false;
3486
5aef8938 3487 /* Warn about crazy big sizes first since that's more
3488 likely to be meaningful than saying that the bound
3489 is greater than the object size if both are big. */
3490 if (range[0] == range[1])
3491 warning_at (loc, opt,
9098b938 3492 "%K%qD specified bound %E "
8d6c6ef5 3493 "exceeds maximum object size %E",
e6a18b5a 3494 exp, func,
8d6c6ef5 3495 range[0], maxobjsize);
5aef8938 3496 else
3497 warning_at (loc, opt,
9098b938 3498 "%K%qD specified bound between %E and %E "
8d6c6ef5 3499 "exceeds maximum object size %E",
e6a18b5a 3500 exp, func,
8d6c6ef5 3501 range[0], range[1], maxobjsize);
5aef8938 3502
3503 return false;
3504 }
3505
e6a18b5a 3506 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
5aef8938 3507 {
080a1363 3508 if (TREE_NO_WARNING (exp))
3509 return false;
3510
8d6c6ef5 3511 if (tree_int_cst_equal (range[0], range[1]))
5aef8938 3512 warning_at (loc, opt,
9098b938 3513 "%K%qD specified bound %E "
8d6c6ef5 3514 "exceeds destination size %E",
e6a18b5a 3515 exp, func,
3516 range[0], dstsize);
5aef8938 3517 else
3518 warning_at (loc, opt,
9098b938 3519 "%K%qD specified bound between %E and %E "
8d6c6ef5 3520 "exceeds destination size %E",
e6a18b5a 3521 exp, func,
3522 range[0], range[1], dstsize);
5aef8938 3523 return false;
3524 }
3525 }
3526 }
3527
e6a18b5a 3528 /* Check for reading past the end of SRC. */
8d6c6ef5 3529 if (slen
e6a18b5a 3530 && slen == srcstr
3531 && dstwrite && range[0]
8d6c6ef5 3532 && tree_int_cst_lt (slen, range[0]))
3533 {
080a1363 3534 if (TREE_NO_WARNING (exp))
3535 return false;
3536
8d6c6ef5 3537 location_t loc = tree_nonartificial_location (exp);
3538
3539 if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3540 warning_n (loc, opt, tree_to_uhwi (range[0]),
3541 "%K%qD reading %E byte from a region of size %E",
3542 "%K%qD reading %E bytes from a region of size %E",
e6a18b5a 3543 exp, func, range[0], slen);
8d6c6ef5 3544 else if (tree_int_cst_sign_bit (range[1]))
3545 {
3546 /* Avoid printing the upper bound if it's invalid. */
3547 warning_at (loc, opt,
9098b938 3548 "%K%qD reading %E or more bytes from a region "
8d6c6ef5 3549 "of size %E",
e6a18b5a 3550 exp, func, range[0], slen);
8d6c6ef5 3551 }
3552 else
3553 warning_at (loc, opt,
9098b938 3554 "%K%qD reading between %E and %E bytes from a region "
8d6c6ef5 3555 "of size %E",
e6a18b5a 3556 exp, func, range[0], range[1], slen);
8d6c6ef5 3557 return false;
3558 }
3559
5aef8938 3560 return true;
3561}
3562
3563/* Helper to compute the size of the object referenced by the DEST
d8aad786 3564 expression which must have pointer type, using Object Size type
5aef8938 3565 OSTYPE (only the least significant 2 bits are used). Return
24e3b821 3566 an estimate of the size of the object if successful or NULL when
3567 the size cannot be determined. When the referenced object involves
3568 a non-constant offset in some range the returned value represents
3569 the largest size given the smallest non-negative offset in the
3570 range. The function is intended for diagnostics and should not
3571 be used to influence code generation or optimization. */
5aef8938 3572
tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  /* First try the regular Object Size machinery; on success return
     the size as a constant of sizetype.  */
  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  /* Otherwise, if DEST is an SSA_NAME, look through its defining
     statement for a POINTER_PLUS_EXPR or ADDR_EXPR to recover
     a size estimate.  */
  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      /* From here on work with the base pointer (the first operand
	 of the defining assignment).  */
      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      /* Constant offset: recurse on the base and subtract
		 the offset from the base object's size.  */
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      /* Variable offset: use its VRP range if one is known.  */
	      wide_int min, max;
	      enum value_range_kind rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min)
			  || wi::sign_mask (max))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) == MEM_REF)
    {
      /* For a MEM_REF, recurse on the base and reduce the size by
	 the (constant) MEM_REF offset; a too-large offset yields
	 a zero remaining size.  */
      tree ref = TREE_OPERAND (dest, 0);
      tree off = TREE_OPERAND (dest, 1);
      if (tree size = compute_objsize (ref, ostype))
	{
	  if (tree_int_cst_lt (off, size))
	    return fold_build2 (MINUS_EXPR, size_type_node, size, off);
	  return integer_zero_node;
	}

      return NULL_TREE;
    }

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  /* Strip the pointer layer and any qualified variant to get at
     the underlying object type.  */
  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3691
3692/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3693 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3694 call expression, DEST is the destination argument, SRC is the source
3695 argument or null, and LEN is the number of bytes. Use Object Size type-0
3696 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3697 (no overflow or invalid sizes), false otherwise. */
3698
3699static bool
e6a18b5a 3700check_memop_access (tree exp, tree dest, tree src, tree size)
5aef8938 3701{
5aef8938 3702 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3703 try to determine the size of the largest source and destination
3704 object using type-0 Object Size regardless of the object size
3705 type specified by the option. */
3706 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3707 tree dstsize = compute_objsize (dest, 0);
5aef8938 3708
e6a18b5a 3709 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3710 srcsize, dstsize);
8d6c6ef5 3711}
3712
3713/* Validate memchr arguments without performing any expansion.
3714 Return NULL_RTX. */
3715
3716static rtx
3717expand_builtin_memchr (tree exp, rtx)
3718{
3719 if (!validate_arglist (exp,
3720 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3721 return NULL_RTX;
3722
3723 tree arg1 = CALL_EXPR_ARG (exp, 0);
3724 tree len = CALL_EXPR_ARG (exp, 2);
3725
3726 /* Diagnose calls where the specified length exceeds the size
3727 of the object. */
3728 if (warn_stringop_overflow)
3729 {
3730 tree size = compute_objsize (arg1, 0);
e6a18b5a 3731 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3732 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
8d6c6ef5 3733 }
3734
3735 return NULL_RTX;
5aef8938 3736}
3737
c2f47e15 3738/* Expand a call EXP to the memcpy builtin.
3739 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3740 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3741 mode MODE if that's convenient). */
c2f47e15 3742
53800dbe 3743static rtx
a65c4d64 3744expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3745{
c2f47e15 3746 if (!validate_arglist (exp,
3747 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3748 return NULL_RTX;
5aef8938 3749
3750 tree dest = CALL_EXPR_ARG (exp, 0);
3751 tree src = CALL_EXPR_ARG (exp, 1);
3752 tree len = CALL_EXPR_ARG (exp, 2);
3753
e6a18b5a 3754 check_memop_access (exp, dest, src, len);
5aef8938 3755
d0fbba1a 3756 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
02aa6d73 3757 /*retmode=*/ RETURN_BEGIN);
f21337ef 3758}
6840589f 3759
4d317237 3760/* Check a call EXP to the memmove built-in for validity.
3761 Return NULL_RTX on both success and failure. */
3762
3763static rtx
3764expand_builtin_memmove (tree exp, rtx)
3765{
3766 if (!validate_arglist (exp,
3767 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
3769
3770 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3771 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3772 tree len = CALL_EXPR_ARG (exp, 2);
3773
e6a18b5a 3774 check_memop_access (exp, dest, src, len);
4d317237 3775
3776 return NULL_RTX;
3777}
3778
c2f47e15 3779/* Expand a call EXP to the mempcpy builtin.
3780 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3781 otherwise try to get the result in TARGET, if convenient (and in
02aa6d73 3782 mode MODE if that's convenient). */
647661c6 3783
3784static rtx
d0fbba1a 3785expand_builtin_mempcpy (tree exp, rtx target)
647661c6 3786{
c2f47e15 3787 if (!validate_arglist (exp,
3788 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3789 return NULL_RTX;
5aef8938 3790
3791 tree dest = CALL_EXPR_ARG (exp, 0);
3792 tree src = CALL_EXPR_ARG (exp, 1);
3793 tree len = CALL_EXPR_ARG (exp, 2);
3794
24e3b821 3795 /* Policy does not generally allow using compute_objsize (which
3796 is used internally by check_memop_size) to change code generation
3797 or drive optimization decisions.
3798
3799 In this instance it is safe because the code we generate has
3800 the same semantics regardless of the return value of
3801 check_memop_sizes. Exactly the same amount of data is copied
3802 and the return value is exactly the same in both cases.
3803
3804 Furthermore, check_memop_size always uses mode 0 for the call to
3805 compute_objsize, so the imprecise nature of compute_objsize is
3806 avoided. */
3807
5aef8938 3808 /* Avoid expanding mempcpy into memcpy when the call is determined
3809 to overflow the buffer. This also prevents the same overflow
3810 from being diagnosed again when expanding memcpy. */
e6a18b5a 3811 if (!check_memop_access (exp, dest, src, len))
5aef8938 3812 return NULL_RTX;
3813
3814 return expand_builtin_mempcpy_args (dest, src, len,
02aa6d73 3815 target, exp, /*retmode=*/ RETURN_END);
f21337ef 3816}
3817
d0fbba1a 3818/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
02aa6d73 3820 of memory from SRC to DEST and assign to TARGET if convenient. Return
3821 value is based on RETMODE argument. */
c2f47e15 3822
static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Pick up profile-driven alignment/size hints for the block
     operation, if available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only stored the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  /* Prefer an inline mempcpy-style expansion only when the C library's
     mempcpy is known to be fast and the caller actually uses the
     end-pointer return value.  */
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* NOTE(review): pc_rtx appears to be a sentinel return from
     emit_block_move_hints meaning no usable result -- confirm against
     its contract in expr.c.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* For mempcpy/stpcpy-style returns, bias the destination address
     by the length (and back off by one for stpcpy).  */
  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3923
3924static rtx
3925expand_builtin_mempcpy_args (tree dest, tree src, tree len,
02aa6d73 3926 rtx target, tree orig_exp, memop_ret retmode)
d0fbba1a 3927{
3928 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
02aa6d73 3929 retmode);
647661c6 3930}
3931
c2f47e15 3932/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3933 we failed, the caller should emit a normal call, otherwise try to
02aa6d73 3934 get the result in TARGET, if convenient.
3935 Return value is based on RETMODE argument. */
727c62dd 3936
static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Bail out unless the target provides a movstr insn pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* A strcpy-style return wants the start of the destination;
	 capture it in a register before the insn clobbers anything.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  /* Operand 0 is the insn's output (the end pointer) and is only
     requested when the caller wants a non-BEGIN return value.  */
  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3977
5aef8938 3978/* Do some very basic size validation of a call to the strcpy builtin
3979 given by EXP. Return NULL_RTX to have the built-in expand to a call
3980 to the library function. */
3981
3982static rtx
3983expand_builtin_strcat (tree exp, rtx)
3984{
3985 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3986 || !warn_stringop_overflow)
3987 return NULL_RTX;
3988
3989 tree dest = CALL_EXPR_ARG (exp, 0);
3990 tree src = CALL_EXPR_ARG (exp, 1);
3991
3992 /* There is no way here to determine the length of the string in
3993 the destination to which the SRC string is being appended so
3994 just diagnose cases when the souce string is longer than
3995 the destination object. */
3996
8d6c6ef5 3997 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3998
e6a18b5a 3999 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4000 destsize);
5aef8938 4001
4002 return NULL_RTX;
4003}
4004
48e1416a 4005/* Expand expression EXP, which is a call to the strcpy builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
c2f47e15 4007 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 4008 convenient). */
902de8ed 4009
53800dbe 4010static rtx
a65c4d64 4011expand_builtin_strcpy (tree exp, rtx target)
53800dbe 4012{
5aef8938 4013 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4014 return NULL_RTX;
4015
4016 tree dest = CALL_EXPR_ARG (exp, 0);
4017 tree src = CALL_EXPR_ARG (exp, 1);
4018
4019 if (warn_stringop_overflow)
4020 {
8d6c6ef5 4021 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e6a18b5a 4022 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4023 src, destsize);
5aef8938 4024 }
4025
a788aa5f 4026 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
0b39ade8 4027 {
4028 /* Check to see if the argument was declared attribute nonstring
4029 and if so, issue a warning since at this point it's not known
4030 to be nul-terminated. */
4031 tree fndecl = get_callee_fndecl (exp);
4032 maybe_warn_nonstring_arg (fndecl, exp);
4033 return ret;
4034 }
4035
4036 return NULL_RTX;
c2f47e15 4037}
4038
4039/* Helper function to do the actual work for expand_builtin_strcpy. The
4040 arguments to the builtin_strcpy call DEST and SRC are broken out
4041 so that this can also be called without constructing an actual CALL_EXPR.
4042 The other arguments and return value are the same as for
4043 expand_builtin_strcpy. */
4044
4045static rtx
a788aa5f 4046expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
c2f47e15 4047{
a788aa5f 4048 /* Detect strcpy calls with unterminated arrays.. */
4049 if (tree nonstr = unterminated_array (src))
4050 {
4051 /* NONSTR refers to the non-nul terminated constant array. */
4052 if (!TREE_NO_WARNING (exp))
4053 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4054 return NULL_RTX;
4055 }
4056
02aa6d73 4057 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
53800dbe 4058}
4059
c2f47e15 4060/* Expand a call EXP to the stpcpy builtin.
4061 Return NULL_RTX if we failed the caller should emit a normal call,
3b824fa6 4062 otherwise try to get the result in TARGET, if convenient (and in
4063 mode MODE if that's convenient). */
4064
static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that may overflow the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src, NULL)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* Diagnose a source array that c_strlen determined to lack
	 a terminating nul.  */
      if (lendata.decl && !TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);

      /* Copy the string plus its terminating nul as a mempcpy of
	 LEN + 1 bytes, asking for an end-minus-one return value.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to a strcpy expansion and compute the
		 return value as DST + LEN.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a target movstr insn, if one exists.  */
      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
4150
df6e8b42 4151/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4152 arguments while being careful to avoid duplicate warnings (which could
4153 be issued if the expander were to expand the call, resulting in it
4154 being emitted in expand_call(). */
4155
4156static rtx
4157expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4158{
4159 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4160 {
4161 /* The call has been successfully expanded. Check for nonstring
4162 arguments and issue warnings as appropriate. */
4163 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4164 return ret;
4165 }
4166
4167 return NULL_RTX;
4168}
4169
4d317237 4170/* Check a call EXP to the stpncpy built-in for validity.
4171 Return NULL_RTX on both success and failure. */
4172
4173static rtx
4174expand_builtin_stpncpy (tree exp, rtx)
4175{
4176 if (!validate_arglist (exp,
4177 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4178 || !warn_stringop_overflow)
4179 return NULL_RTX;
4180
aca1a787 4181 /* The source and destination of the call. */
4d317237 4182 tree dest = CALL_EXPR_ARG (exp, 0);
4183 tree src = CALL_EXPR_ARG (exp, 1);
4184
aca1a787 4185 /* The exact number of bytes to write (not the maximum). */
4d317237 4186 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 4187
aca1a787 4188 /* The size of the destination object. */
8d6c6ef5 4189 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 4190
e6a18b5a 4191 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4d317237 4192
4193 return NULL_RTX;
4194}
4195
6840589f 4196/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4197 bytes from constant string DATA + OFFSET and return it as target
4198 constant. */
4199
09879952 4200rtx
aecda0d6 4201builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 4202 scalar_int_mode mode)
6840589f 4203{
4204 const char *str = (const char *) data;
4205
4206 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4207 return const0_rtx;
4208
4209 return c_readstr (str + offset, mode);
4210}
4211
5aef8938 4212/* Helper to check the sizes of sequences and the destination of calls
4213 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4214 success (no overflow or invalid sizes), false otherwise. */
4215
static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The bound (third) argument of strncat.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or the bound is known to be
     smaller, check against the bound instead.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4272
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strcpy builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  This function
   only diagnoses; it never expands the call inline.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  /* Bail out silently unless the call has the expected
     (char *, const char *, size_t) shape and -Wstringop-overflow
     is enabled — there is nothing to diagnose otherwise.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below.  */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      /* Point the diagnostic at the macro expansion point for calls
	 originating in system headers.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* If the source length is unknown, or the bound is provably smaller
     than the source length, the bound is what will be written.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4341
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  TARGET, if
   non-null, is a hint for where to place the result.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
	{
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_access will also
	     check SLEN if LEN's value isn't known.  */
	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
			destsize);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes including
	 the terminating nul.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* Inline expansion needs the source as a constant string, a
	     known destination alignment, and a length store_by_pieces
	     can handle; otherwise fall back to the library call.  */
	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false,
			   RETURN_BEGIN);
	  /* strncpy returns its first argument; materialize DEST as the
	     result in pointer mode.  */
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
4405
ecc318ff 4406/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4407 bytes from constant string DATA + OFFSET and return it as target
4408 constant. */
4409
f656b751 4410rtx
aecda0d6 4411builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4412 scalar_int_mode mode)
ecc318ff 4413{
4414 const char *c = (const char *) data;
364c0c59 4415 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 4416
4417 memset (p, *c, GET_MODE_SIZE (mode));
4418
4419 return c_readstr (p, mode);
4420}
4421
a7ec6974 4422/* Callback routine for store_by_pieces. Return the RTL of a register
4423 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4424 char value given in the RTL register data. For example, if mode is
4425 4 bytes wide, return the RTL for 0x01010101*data. */
4426
4427static rtx
aecda0d6 4428builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4429 scalar_int_mode mode)
a7ec6974 4430{
4431 rtx target, coeff;
4432 size_t size;
4433 char *p;
4434
4435 size = GET_MODE_SIZE (mode);
f0ce3b1f 4436 if (size == 1)
4437 return (rtx) data;
a7ec6974 4438
364c0c59 4439 p = XALLOCAVEC (char, size);
a7ec6974 4440 memset (p, 1, size);
4441 coeff = c_readstr (p, mode);
4442
f0ce3b1f 4443 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 4444 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4445 return force_reg (mode, target);
4446}
4447
48e1416a 4448/* Expand expression EXP, which is a call to the memset builtin. Return
4449 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 4450 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 4451 convenient). */
902de8ed 4452
53800dbe 4453static rtx
3754d046 4454expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 4455{
c2f47e15 4456 if (!validate_arglist (exp,
4457 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4458 return NULL_RTX;
5aef8938 4459
4460 tree dest = CALL_EXPR_ARG (exp, 0);
4461 tree val = CALL_EXPR_ARG (exp, 1);
4462 tree len = CALL_EXPR_ARG (exp, 2);
4463
e6a18b5a 4464 check_memop_access (exp, dest, NULL_TREE, len);
5aef8938 4465
4466 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 4467}
53800dbe 4468
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used for the
   library-call fallback and its tail-call flag.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to refine the expected
     alignment and block size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: constant fill value; fail to the library call if it does
     not fit in a target char.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2a: nonzero constant byte.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2b: zero fill — use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Fall back to an explicit call to the original function (memset or
     bzero), preserving its tail-call disposition.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4610
48e1416a 4611/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 4612 NULL_RTX if we failed the caller should emit a normal call. */
27d0c333 4613
ffc83088 4614static rtx
0b25db21 4615expand_builtin_bzero (tree exp)
ffc83088 4616{
c2f47e15 4617 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 4618 return NULL_RTX;
ffc83088 4619
5aef8938 4620 tree dest = CALL_EXPR_ARG (exp, 0);
4621 tree size = CALL_EXPR_ARG (exp, 1);
4622
e6a18b5a 4623 check_memop_access (exp, dest, NULL_TREE, size);
bf8e3599 4624
7369e7ba 4625 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 4626 memset(ptr x, int 0, size_t y). This is done this way
4627 so that if it isn't expanded inline, we fallback to
4628 calling bzero instead of memset. */
bf8e3599 4629
5aef8938 4630 location_t loc = EXPR_LOCATION (exp);
4631
c2f47e15 4632 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 4633 fold_convert_loc (loc,
4634 size_type_node, size),
c2f47e15 4635 const0_rtx, VOIDmode, exp);
ffc83088 4636}
4637
d6f01a40 4638/* Try to expand cmpstr operation ICODE with the given operands.
4639 Return the result rtx on success, otherwise return null. */
4640
4641static rtx
4642expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4643 HOST_WIDE_INT align)
4644{
4645 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4646
4647 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4648 target = NULL_RTX;
4649
2e966e2a 4650 class expand_operand ops[4];
d6f01a40 4651 create_output_operand (&ops[0], target, insn_mode);
4652 create_fixed_operand (&ops[1], arg1_rtx);
4653 create_fixed_operand (&ops[2], arg2_rtx);
4654 create_integer_operand (&ops[3], align);
4655 if (maybe_expand_insn (icode, 4, ops))
4656 return ops[0].value;
4657 return NULL_RTX;
4658}
4659
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len, /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For an equality comparison the operand order is irrelevant, so if
     only the first argument is a constant string, swap the operands so
     the constant is always ARG2.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only stored the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4770
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn pattern there is nothing further
     we can do inline.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      /* Lengths include the terminating nul (hence the +1 below).  */
      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
53800dbe 4887
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* Lengths include the terminating nul.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
5000
a66c9326 5001/* Expand a call to __builtin_saveregs, generating the result in TARGET,
5002 if that's convenient. */
902de8ed 5003
a66c9326 5004rtx
aecda0d6 5005expand_builtin_saveregs (void)
53800dbe 5006{
1e0c0b35 5007 rtx val;
5008 rtx_insn *seq;
53800dbe 5009
5010 /* Don't do __builtin_saveregs more than once in a function.
5011 Save the result of the first call and reuse it. */
5012 if (saveregs_value != 0)
5013 return saveregs_value;
53800dbe 5014
a66c9326 5015 /* When this function is called, it means that registers must be
5016 saved on entry to this function. So we migrate the call to the
5017 first insn of this function. */
5018
5019 start_sequence ();
53800dbe 5020
a66c9326 5021 /* Do whatever the machine needs done in this case. */
45550790 5022 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 5023
a66c9326 5024 seq = get_insns ();
5025 end_sequence ();
53800dbe 5026
a66c9326 5027 saveregs_value = val;
53800dbe 5028
31d3e01c 5029 /* Put the insns after the NOTE that starts the function. If this
5030 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 5031 the code is placed at the start of the function. */
5032 push_topmost_sequence ();
0ec80471 5033 emit_insn_after (seq, entry_of_function ());
a66c9326 5034 pop_topmost_sequence ();
5035
5036 return val;
53800dbe 5037}
5038
79012a9d 5039/* Expand a call to __builtin_next_arg. */
27d0c333 5040
53800dbe 5041static rtx
79012a9d 5042expand_builtin_next_arg (void)
53800dbe 5043{
79012a9d 5044 /* Checking arguments is already done in fold_builtin_next_arg
5045 that must be called before this function. */
940ddc5c 5046 return expand_binop (ptr_mode, add_optab,
abe32cce 5047 crtl->args.internal_arg_pointer,
5048 crtl->args.arg_offset_rtx,
53800dbe 5049 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5050}
5051
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here; VALIST is the va_list expression; NEEDS_LVALUE is nonzero
   when the caller must be able to assign through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* No lvalue and no side effects: safe to use as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Dereference the (possibly saved) address so the result is an
	 lvalue of the canonical va_list type.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
5101
/* The "standard" definition of va_list is void*.  Default
   implementation of the TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
5109
/* The "standard" abi va_list is va_list_type_node.  Default
   implementation of the TARGET_FN_ABI_VA_LIST hook; FNDECL is
   ignored because the standard ABI uses one va_list type for all
   functions.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
5117
5118/* The "standard" type of va_list is va_list_type_node. */
5119
5120tree
5121std_canonical_va_list_type (tree type)
5122{
5123 tree wtype, htype;
5124
5f57a8b1 5125 wtype = va_list_type_node;
5126 htype = type;
b6da2e41 5127
5128 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 5129 {
5130 /* If va_list is an array type, the argument may have decayed
5131 to a pointer type, e.g. by being passed to another function.
5132 In that case, unwrap both types so that we can compare the
5133 underlying records. */
5134 if (TREE_CODE (htype) == ARRAY_TYPE
5135 || POINTER_TYPE_P (htype))
5136 {
5137 wtype = TREE_TYPE (wtype);
5138 htype = TREE_TYPE (htype);
5139 }
5140 }
5141 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5142 return va_list_type_node;
5143
5144 return NULL_TREE;
5145}
5146
a66c9326 5147/* The "standard" implementation of va_start: just assign `nextarg' to
5148 the variable. */
27d0c333 5149
a66c9326 5150void
aecda0d6 5151std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 5152{
f03c17bc 5153 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5154 convert_move (va_r, nextarg, 0);
a66c9326 5155}
5156
c2f47e15 5157/* Expand EXP, a call to __builtin_va_start. */
27d0c333 5158
a66c9326 5159static rtx
c2f47e15 5160expand_builtin_va_start (tree exp)
a66c9326 5161{
5162 rtx nextarg;
c2f47e15 5163 tree valist;
389dd41b 5164 location_t loc = EXPR_LOCATION (exp);
a66c9326 5165
c2f47e15 5166 if (call_expr_nargs (exp) < 2)
cb166087 5167 {
389dd41b 5168 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 5169 return const0_rtx;
5170 }
a66c9326 5171
c2f47e15 5172 if (fold_builtin_next_arg (exp, true))
79012a9d 5173 return const0_rtx;
7c2f0500 5174
79012a9d 5175 nextarg = expand_builtin_next_arg ();
389dd41b 5176 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 5177
8a58ed0a 5178 if (targetm.expand_builtin_va_start)
5179 targetm.expand_builtin_va_start (valist, nextarg);
5180 else
5181 std_expand_builtin_va_start (valist, nextarg);
a66c9326 5182
5183 return const0_rtx;
5184}
5185
c2f47e15 5186/* Expand EXP, a call to __builtin_va_end. */
f7c44134 5187
a66c9326 5188static rtx
c2f47e15 5189expand_builtin_va_end (tree exp)
a66c9326 5190{
c2f47e15 5191 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 5192
8a15c04a 5193 /* Evaluate for side effects, if needed. I hate macros that don't
5194 do that. */
5195 if (TREE_SIDE_EFFECTS (valist))
5196 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 5197
5198 return const0_rtx;
5199}
5200
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Non-array va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5252
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call expression.  Returns the resulting rtx, or const0_rtx after a
   diagnostic for invalid or unsupported arguments.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy non-register, non-constant
	 values into a register before returning them.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5303
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  Handles the plain
   BUILT_IN_ALLOCA as well as the _WITH_ALIGN and _WITH_ALIGN_AND_MAX
   variants, which take an extra alignment and maximum-size argument
   respectively.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit
	  ))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
5368
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  /* TOP and BOT delimit the region to unpoison.  */
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the dynamic stack offset in Pmode, then fold it into BOT
     in ptr_mode.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5392
74bdbe96 5393/* Expand a call to bswap builtin in EXP.
5394 Return NULL_RTX if a normal call should be emitted rather than expanding the
5395 function in-line. If convenient, the result should be placed in TARGET.
5396 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 5397
5398static rtx
3754d046 5399expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 5400 rtx subtarget)
42791117 5401{
42791117 5402 tree arg;
5403 rtx op0;
5404
c2f47e15 5405 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5406 return NULL_RTX;
42791117 5407
c2f47e15 5408 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 5409 op0 = expand_expr (arg,
5410 subtarget && GET_MODE (subtarget) == target_mode
5411 ? subtarget : NULL_RTX,
5412 target_mode, EXPAND_NORMAL);
5413 if (GET_MODE (op0) != target_mode)
5414 op0 = convert_to_mode (target_mode, op0, 1);
42791117 5415
74bdbe96 5416 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 5417
5418 gcc_assert (target);
5419
74bdbe96 5420 return convert_to_mode (target_mode, target, 1);
42791117 5421}
5422
c2f47e15 5423/* Expand a call to a unary builtin in EXP.
5424 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 5425 function in-line. If convenient, the result should be placed in TARGET.
5426 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 5427
53800dbe 5428static rtx
3754d046 5429expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 5430 rtx subtarget, optab op_optab)
53800dbe 5431{
5432 rtx op0;
c2f47e15 5433
5434 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5435 return NULL_RTX;
53800dbe 5436
5437 /* Compute the argument. */
f97eea22 5438 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5439 (subtarget
5440 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5441 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 5442 VOIDmode, EXPAND_NORMAL);
6a08d0ab 5443 /* Compute op, into TARGET if possible.
53800dbe 5444 Set TARGET to wherever the result comes back. */
c2f47e15 5445 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 5446 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 5447 gcc_assert (target);
7d3f6cc7 5448
efb070c8 5449 return convert_to_mode (target_mode, target, 0);
53800dbe 5450}
89cfe6e5 5451
48e1416a 5452/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 5453 as the builtin_expect semantic should've been already executed by
5454 tree branch prediction pass. */
89cfe6e5 5455
5456static rtx
c2f47e15 5457expand_builtin_expect (tree exp, rtx target)
89cfe6e5 5458{
1e4adcfc 5459 tree arg;
89cfe6e5 5460
c2f47e15 5461 if (call_expr_nargs (exp) < 2)
89cfe6e5 5462 return const0_rtx;
c2f47e15 5463 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 5464
c2f47e15 5465 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 5466 /* When guessing was done, the hints should be already stripped away. */
07311427 5467 gcc_assert (!flag_guess_branch_prob
852f689e 5468 || optimize == 0 || seen_error ());
89cfe6e5 5469 return target;
5470}
689df48e 5471
01107f42 5472/* Expand a call to __builtin_expect_with_probability. We just return our
5473 argument as the builtin_expect semantic should've been already executed by
5474 tree branch prediction pass. */
5475
5476static rtx
5477expand_builtin_expect_with_probability (tree exp, rtx target)
5478{
5479 tree arg;
5480
5481 if (call_expr_nargs (exp) < 3)
5482 return const0_rtx;
5483 arg = CALL_EXPR_ARG (exp, 0);
5484
5485 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5486 /* When guessing was done, the hints should be already stripped away. */
5487 gcc_assert (!flag_guess_branch_prob
5488 || optimize == 0 || seen_error ());
5489 return target;
5490}
5491
5492
fca0886c 5493/* Expand a call to __builtin_assume_aligned. We just return our first
5494 argument as the builtin_assume_aligned semantic should've been already
5495 executed by CCP. */
5496
5497static rtx
5498expand_builtin_assume_aligned (tree exp, rtx target)
5499{
5500 if (call_expr_nargs (exp) < 2)
5501 return const0_rtx;
5502 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5503 EXPAND_NORMAL);
5504 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5505 && (call_expr_nargs (exp) < 3
5506 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5507 return target;
5508}
5509
c22de3f0 5510void
aecda0d6 5511expand_builtin_trap (void)
a0ef1725 5512{
4db8dd0c 5513 if (targetm.have_trap ())
f73960eb 5514 {
4db8dd0c 5515 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 5516 /* For trap insns when not accumulating outgoing args force
5517 REG_ARGS_SIZE note to prevent crossjumping of calls with
5518 different args sizes. */
5519 if (!ACCUMULATE_OUTGOING_ARGS)
f6a1fc98 5520 add_args_size_note (insn, stack_pointer_delta);
f73960eb 5521 }
a0ef1725 5522 else
61ffc71a 5523 {
5524 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5525 tree call_expr = build_call_expr (fn, 0);
5526 expand_call (call_expr, NULL_RTX, false);
5527 }
5528
a0ef1725 5529 emit_barrier ();
5530}
78a74442 5531
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No insns are generated; the barrier merely tells later RTL passes
     that the preceding code does not fall through.  */
  emit_barrier ();
}
5542
c2f47e15 5543/* Expand EXP, a call to fabs, fabsf or fabsl.
5544 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 5545 the function inline. If convenient, the result should be placed
5546 in TARGET. SUBTARGET may be used as the target for computing
5547 the operand. */
5548
5549static rtx
c2f47e15 5550expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 5551{
3754d046 5552 machine_mode mode;
78a74442 5553 tree arg;
5554 rtx op0;
5555
c2f47e15 5556 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5557 return NULL_RTX;
78a74442 5558
c2f47e15 5559 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 5560 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 5561 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 5562 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 5563 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5564}
5565
c2f47e15 5566/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5567 Return NULL is a normal call should be emitted rather than expanding the
5568 function inline. If convenient, the result should be placed in TARGET.
5569 SUBTARGET may be used as the target for computing the operand. */
5570
5571static rtx
c2f47e15 5572expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5573{
5574 rtx op0, op1;
5575 tree arg;
5576
c2f47e15 5577 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5578 return NULL_RTX;
270436f3 5579
c2f47e15 5580 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5581 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5582
c2f47e15 5583 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5584 op1 = expand_normal (arg);
270436f3 5585
5586 return expand_copysign (op0, op1, target);
5587}
5588
/* Expand a call to __builtin___clear_cache.  Returns const0_rtx when
   the builtin has been fully expanded (or needs no code), or NULL_RTX
   to request a normal library call.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      class expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* The insn predicate rejected the operands; expand to nothing rather
     than risk the recursive library call described above.  */
  return const0_rtx;
}
5638
4ee9c684 5639/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5640
5641static rtx
5642round_trampoline_addr (rtx tramp)
5643{
5644 rtx temp, addend, mask;
5645
5646 /* If we don't need too much alignment, we'll have been guaranteed
5647 proper alignment by get_trampoline_type. */
5648 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5649 return tramp;
5650
5651 /* Round address up to desired boundary. */
5652 temp = gen_reg_rtx (Pmode);
0359f9f5 5653 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5654 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5655
5656 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5657 temp, 0, OPTAB_LIB_WIDEN);
5658 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5659 temp, 0, OPTAB_LIB_WIDEN);
5660
5661 return tramp;
5662}
5663
/* Expand a call to the __builtin_init_trampoline builtin.  EXP carries
   the trampoline address, the nested function's address, and the static
   chain value.  ONSTACK is true for the classic stack trampoline and
   false for the heap variant.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5721
5722static rtx
c2f47e15 5723expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5724{
5725 rtx tramp;
5726
c2f47e15 5727 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5728 return NULL_RTX;
5729
c2f47e15 5730 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5731 tramp = round_trampoline_addr (tramp);
82c7907c 5732 if (targetm.calls.trampoline_adjust_address)
5733 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5734
5735 return tramp;
5736}
5737
a27e3913 5738/* Expand a call to the builtin descriptor initialization routine.
5739 A descriptor is made up of a couple of pointers to the static
5740 chain and the code entry in this order. */
5741
5742static rtx
5743expand_builtin_init_descriptor (tree exp)
5744{
5745 tree t_descr, t_func, t_chain;
5746 rtx m_descr, r_descr, r_func, r_chain;
5747
5748 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5749 VOID_TYPE))
5750 return NULL_RTX;
5751
5752 t_descr = CALL_EXPR_ARG (exp, 0);
5753 t_func = CALL_EXPR_ARG (exp, 1);
5754 t_chain = CALL_EXPR_ARG (exp, 2);
5755
5756 r_descr = expand_normal (t_descr);
5757 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5758 MEM_NOTRAP_P (m_descr) = 1;
5a8d6207 5759 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
a27e3913 5760
5761 r_func = expand_normal (t_func);
5762 r_chain = expand_normal (t_chain);
5763
5764 /* Generate insns to initialize the descriptor. */
5765 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5766 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5767 POINTER_SIZE / BITS_PER_UNIT), r_func);
5768
5769 return const0_rtx;
5770}
5771
5772/* Expand a call to the builtin descriptor adjustment routine. */
5773
5774static rtx
5775expand_builtin_adjust_descriptor (tree exp)
5776{
5777 rtx tramp;
5778
5779 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5780 return NULL_RTX;
5781
5782 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5783
5784 /* Unalign the descriptor to allow runtime identification. */
5785 tramp = plus_constant (ptr_mode, tramp,
5786 targetm.calls.custom_function_descriptors);
5787
5788 return force_operand (tramp, NULL_RTX);
5789}
5790
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn did not match after all; discard it and fall back to
	 the generic bit-extraction path below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as an integer directly.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
73673831 5897
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the FUNCTION_DECL of the builtin
   being expanded (one of the fork/exec family).  IGNORE is nonzero if
   the return value is to be ignored.

   Returns NULL_RTX when profiling is off, which tells the caller to
   expand EXP as an ordinary call.  Otherwise redirects the call to the
   matching __gcov_* wrapper so profile counters stay consistent across
   fork/exec.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  /* Map the builtin to the name of its libgcov wrapper.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern declaration for the wrapper with the same type as
     the original builtin, so the rewritten call type-checks.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  /* Rebuild EXP with the wrapper as callee, keeping all arguments.  */
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
48e1416a 5962
b6a5fc45 5963
5964\f
3e272de8 5965/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5966 the pointer in these functions is void*, the tree optimizers may remove
5967 casts. The mode computed in expand_builtin isn't reliable either, due
5968 to __sync_bool_compare_and_swap.
5969
5970 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5971 group of builtins. This gives us log2 of the mode size. */
5972
3754d046 5973static inline machine_mode
3e272de8 5974get_builtin_sync_mode (int fcode_diff)
5975{
ad3a13b5 5976 /* The size is not negotiable, so ask not to get BLKmode in return
5977 if the target indicates that a smaller size would be better. */
517be012 5978 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
3e272de8 5979}
5980
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the integer mode of the
   access.  The returned MEM is volatile, carries the memory-barrier
   alias set, and has at least the mode's natural alignment.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  /* LOC is normally a pointer, but optimizers may have stripped the
     pointer type; look through to the pointed-to type (when there is
     one) to recover the address space.  */
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  /* Barrier alias set + volatile keep the access from being reordered
     or optimized away.  */
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
6013
1cd6e20d 6014/* Make sure an argument is in the right mode.
6015 EXP is the tree argument.
6016 MODE is the mode it should be in. */
6017
6018static rtx
3754d046 6019expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 6020{
6021 rtx val;
3754d046 6022 machine_mode old_mode;
1cd6e20d 6023
6024 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6025 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6026 of CONST_INTs, where we know the old_mode only from the call argument. */
6027
6028 old_mode = GET_MODE (val);
6029 if (old_mode == VOIDmode)
6030 old_mode = TYPE_MODE (TREE_TYPE (exp));
6031 val = convert_modes (mode, old_mode, val, 1);
6032 return val;
6033}
6034
6035
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand changed meaning in GCC 4.4; optionally remind
     the user once per NAND flavor per translation unit.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Function-local statics: each note is emitted at most once.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Legacy __sync operations always have full (SYNC_SEQ_CST) barrier
     semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
6098
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.
   Returns NULL_RTX if the target provides no compare-and-swap expansion.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* Request only the result the caller wants: the success flag for the
     bool form, the old value otherwise.  const0_rtx means the result is
     unused, so request neither.  */
  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
6131
6132/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6133 general form is actually an atomic exchange, and some targets only
6134 support a reduced form with the second argument being a constant 1.
48e1416a 6135 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 6136 the results. */
b6a5fc45 6137
6138static rtx
3754d046 6139expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 6140 rtx target)
b6a5fc45 6141{
041e0215 6142 rtx val, mem;
b6a5fc45 6143
6144 /* Expand the operands. */
c2f47e15 6145 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 6146 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6147
7821cde1 6148 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 6149}
6150
6151/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6152
6153static void
3754d046 6154expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 6155{
6156 rtx mem;
6157
6158 /* Expand the operands. */
6159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160
a372f7ca 6161 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 6162}
6163
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  Invalid or
   non-constant arguments degrade (with a warning where possible) to
   MEMMODEL_SEQ_CST, the strongest model.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Targets may encode extra architecture-specific bits; let the target
     hook validate them, otherwise reject anything outside the mask.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
6207
6208/* Expand the __atomic_exchange intrinsic:
6209 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6210 EXP is the CALL_EXPR.
6211 TARGET is an optional place for us to store the results. */
6212
6213static rtx
3754d046 6214expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 6215{
6216 rtx val, mem;
6217 enum memmodel model;
6218
6219 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 6220
6221 if (!flag_inline_atomics)
6222 return NULL_RTX;
6223
6224 /* Expand the operands. */
6225 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6226 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6227
7821cde1 6228 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 6229}
6230
/* Expand the __atomic_compare_exchange intrinsic:
   bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
				   TYPE desired, BOOL weak,
				   enum memmodel success,
				   enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   Returns NULL_RTX when the operation cannot be inlined.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The standard requires the failure ordering to be no stronger than
     the success ordering, and never release/acq_rel; degrade to seq_cst
     with a warning on violations.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer argument; form a MEM for the expected value.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6312
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* Library entry point takes 5 arguments: ptr, expected-ptr, desired,
     success model, failure model.  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the by-value expected operand to a stack slot so we can pass
     its address, as the library function requires.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The IFN result is a complex pair {success, oldval}; write both
	 parts into the lhs.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6362
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.
   Argument 3 packs the access size in its low 8 bits and the weak flag
   in bit 8.  Falls back to a __atomic_compare_exchange_N library call
   when inlining is disabled or unsupported.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Low byte of arg 3 is the access size in bytes.  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Same ordering constraints as __atomic_compare_exchange; degrade to
     seq_cst with a warning on violations.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  /* Bit 8 of arg 3 carries the weak flag.  */
  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* The result is a complex pair {success, oldval}; write both
	 parts into the lhs.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6432
1cd6e20d 6433/* Expand the __atomic_load intrinsic:
6434 TYPE __atomic_load (TYPE *object, enum memmodel)
6435 EXP is the CALL_EXPR.
6436 TARGET is an optional place for us to store the results. */
6437
6438static rtx
3754d046 6439expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 6440{
6441 rtx mem;
6442 enum memmodel model;
6443
6444 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 6445 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 6446 {
be1e7283 6447 location_t loc
2cb724f9 6448 = expansion_point_location_if_in_system_header (input_location);
6449 warning_at (loc, OPT_Winvalid_memory_model,
6450 "invalid memory model for %<__atomic_load%>");
086f4e33 6451 model = MEMMODEL_SEQ_CST;
1cd6e20d 6452 }
6453
6454 if (!flag_inline_atomics)
6455 return NULL_RTX;
6456
6457 /* Expand the operand. */
6458 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6459
6460 return expand_atomic_load (target, mem, model);
6461}
6462
6463
6464/* Expand the __atomic_store intrinsic:
6465 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6466 EXP is the CALL_EXPR.
6467 TARGET is an optional place for us to store the results. */
6468
6469static rtx
3754d046 6470expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 6471{
6472 rtx mem, val;
6473 enum memmodel model;
6474
6475 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 6476 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6477 || is_mm_release (model)))
1cd6e20d 6478 {
be1e7283 6479 location_t loc
2cb724f9 6480 = expansion_point_location_if_in_system_header (input_location);
6481 warning_at (loc, OPT_Winvalid_memory_model,
6482 "invalid memory model for %<__atomic_store%>");
086f4e33 6483 model = MEMMODEL_SEQ_CST;
1cd6e20d 6484 }
6485
6486 if (!flag_inline_atomics)
6487 return NULL_RTX;
6488
6489 /* Expand the operands. */
6490 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6491 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6492
8808bf16 6493 return expand_atomic_store (mem, val, model, false);
1cd6e20d 6494}
6495
/* Expand the __atomic_fetch_XXX intrinsic:
   TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily swap the
     callee in the CALL_EXPR; it is restored below after expansion.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     The library routine computes only one of the fetch orders; the other
     is reconstructed by re-applying the operation (NAND needs AND+NOT).  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6570
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.
   Arguments: pointer to the word, bit index, and a flag selecting whether
   the result is the tested bit shifted down (flag set) or the bit in
   place (flag clear).  Tries the dedicated bit-test-and optab first and
   falls back to a plain atomic fetch-op plus masking.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional 4th argument carries an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Select the RTL operation and optab matching the internal fn.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: just emit the atomic RMW with the proper mask
	 (1 << bit, inverted for the AND/reset case).  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the dedicated bit-test-and-* instruction first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: atomic fetch-op on the full mask, then extract the bit.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* Shift the old bit down to bit 0 and mask it off.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6654
/* Expand an atomic clear operation.
   void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* Acquire-style orderings are invalid for a clear (it is a store).
     NOTE(review): the diagnostic names %<__atomic_store%> although the
     user wrote __atomic_clear -- presumably because clear expands to a
     store; confirm whether the message should name __atomic_clear.  */
  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      location_t loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
6689
6690/* Expand an atomic test_and_set operation.
6691 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6692 EXP is the call expression. */
6693
6694static rtx
7821cde1 6695expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6696{
7821cde1 6697 rtx mem;
10b744a3 6698 enum memmodel model;
3754d046 6699 machine_mode mode;
10b744a3 6700
517be012 6701 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6702 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6703 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6704
7821cde1 6705 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6706}
6707
6708
1cd6e20d 6709/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6710 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6711
6712static tree
6713fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6714{
6715 int size;
3754d046 6716 machine_mode mode;
1cd6e20d 6717 unsigned int mode_align, type_align;
6718
6719 if (TREE_CODE (arg0) != INTEGER_CST)
6720 return NULL_TREE;
b6a5fc45 6721
517be012 6722 /* We need a corresponding integer mode for the access to be lock-free. */
1cd6e20d 6723 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
517be012 6724 if (!int_mode_for_size (size, 0).exists (&mode))
6725 return boolean_false_node;
6726
1cd6e20d 6727 mode_align = GET_MODE_ALIGNMENT (mode);
6728
4ca99588 6729 if (TREE_CODE (arg1) == INTEGER_CST)
6730 {
6731 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6732
6733 /* Either this argument is null, or it's a fake pointer encoding
6734 the alignment of the object. */
ac29ece2 6735 val = least_bit_hwi (val);
4ca99588 6736 val *= BITS_PER_UNIT;
6737
6738 if (val == 0 || mode_align < val)
6739 type_align = mode_align;
6740 else
6741 type_align = val;
6742 }
1cd6e20d 6743 else
6744 {
6745 tree ttype = TREE_TYPE (arg1);
6746
6747 /* This function is usually invoked and folded immediately by the front
6748 end before anything else has a chance to look at it. The pointer
6749 parameter at this point is usually cast to a void *, so check for that
6750 and look past the cast. */
2f8a2ead 6751 if (CONVERT_EXPR_P (arg1)
6752 && POINTER_TYPE_P (ttype)
6753 && VOID_TYPE_P (TREE_TYPE (ttype))
6754 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1cd6e20d 6755 arg1 = TREE_OPERAND (arg1, 0);
6756
6757 ttype = TREE_TYPE (arg1);
6758 gcc_assert (POINTER_TYPE_P (ttype));
6759
6760 /* Get the underlying type of the object. */
6761 ttype = TREE_TYPE (ttype);
6762 type_align = TYPE_ALIGN (ttype);
6763 }
6764
47ae02b7 6765 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 6766 be used. */
6767 if (type_align < mode_align)
06308d2a 6768 return boolean_false_node;
1cd6e20d 6769
6770 /* Check if a compare_and_swap pattern exists for the mode which represents
6771 the required size. The pattern is not allowed to fail, so the existence
d5f5fa27 6772 of the pattern indicates support is present. Also require that an
6773 atomic load exists for the required size. */
6774 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
06308d2a 6775 return boolean_true_node;
1cd6e20d 6776 else
06308d2a 6777 return boolean_false_node;
1cd6e20d 6778}
6779
6780/* Return true if the parameters to call EXP represent an object which will
6781 always generate lock free instructions. The first argument represents the
6782 size of the object, and the second parameter is a pointer to the object
6783 itself. If NULL is passed for the object, then the result is based on
6784 typical alignment for an object of the specified size. Otherwise return
6785 false. */
6786
6787static rtx
6788expand_builtin_atomic_always_lock_free (tree exp)
6789{
6790 tree size;
6791 tree arg0 = CALL_EXPR_ARG (exp, 0);
6792 tree arg1 = CALL_EXPR_ARG (exp, 1);
6793
6794 if (TREE_CODE (arg0) != INTEGER_CST)
6795 {
85b9be9b 6796 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
1cd6e20d 6797 return const0_rtx;
6798 }
6799
6800 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6801 if (size == boolean_true_node)
1cd6e20d 6802 return const1_rtx;
6803 return const0_rtx;
6804}
6805
6806/* Return a one or zero if it can be determined that object ARG1 of size ARG
6807 is lock free on this architecture. */
6808
6809static tree
6810fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6811{
6812 if (!flag_inline_atomics)
6813 return NULL_TREE;
6814
6815 /* If it isn't always lock free, don't generate a result. */
06308d2a 6816 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6817 return boolean_true_node;
1cd6e20d 6818
6819 return NULL_TREE;
6820}
6821
6822/* Return true if the parameters to call EXP represent an object which will
6823 always generate lock free instructions. The first argument represents the
6824 size of the object, and the second parameter is a pointer to the object
6825 itself. If NULL is passed for the object, then the result is based on
6826 typical alignment for an object of the specified size. Otherwise return
6827 NULL*/
6828
6829static rtx
6830expand_builtin_atomic_is_lock_free (tree exp)
6831{
6832 tree size;
6833 tree arg0 = CALL_EXPR_ARG (exp, 0);
6834 tree arg1 = CALL_EXPR_ARG (exp, 1);
6835
6836 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6837 {
85b9be9b 6838 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
1cd6e20d 6839 return NULL_RTX;
6840 }
6841
6842 if (!flag_inline_atomics)
6843 return NULL_RTX;
6844
6845 /* If the value is known at compile time, return the RTX for it. */
6846 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6847 if (size == boolean_true_node)
1cd6e20d 6848 return const1_rtx;
6849
6850 return NULL_RTX;
6851}
6852
1cd6e20d 6853/* Expand the __atomic_thread_fence intrinsic:
6854 void __atomic_thread_fence (enum memmodel)
6855 EXP is the CALL_EXPR. */
6856
6857static void
6858expand_builtin_atomic_thread_fence (tree exp)
6859{
fe54c06b 6860 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6861 expand_mem_thread_fence (model);
1cd6e20d 6862}
6863
6864/* Expand the __atomic_signal_fence intrinsic:
6865 void __atomic_signal_fence (enum memmodel)
6866 EXP is the CALL_EXPR. */
6867
6868static void
6869expand_builtin_atomic_signal_fence (tree exp)
6870{
fe54c06b 6871 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6872 expand_mem_signal_fence (model);
b6a5fc45 6873}
6874
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  /* __sync_synchronize is a full barrier; emit a thread fence using the
     __sync flavor of the seq-cst memory model.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6882
badaa04c 6883static rtx
6884expand_builtin_thread_pointer (tree exp, rtx target)
6885{
6886 enum insn_code icode;
6887 if (!validate_arglist (exp, VOID_TYPE))
6888 return const0_rtx;
6889 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6890 if (icode != CODE_FOR_nothing)
6891 {
2e966e2a 6892 class expand_operand op;
3ed779c3 6893 /* If the target is not sutitable then create a new target. */
6894 if (target == NULL_RTX
6895 || !REG_P (target)
6896 || GET_MODE (target) != Pmode)
badaa04c 6897 target = gen_reg_rtx (Pmode);
6898 create_output_operand (&op, target, Pmode);
6899 expand_insn (icode, 1, &op);
6900 return target;
6901 }
2f6d557f 6902 error ("%<__builtin_thread_pointer%> is not supported on this target");
badaa04c 6903 return const0_rtx;
6904}
6905
6906static void
6907expand_builtin_set_thread_pointer (tree exp)
6908{
6909 enum insn_code icode;
6910 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6911 return;
6912 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6913 if (icode != CODE_FOR_nothing)
6914 {
2e966e2a 6915 class expand_operand op;
badaa04c 6916 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6917 Pmode, EXPAND_NORMAL);
6f343c10 6918 create_input_operand (&op, val, Pmode);
badaa04c 6919 expand_insn (icode, 1, &op);
6920 return;
6921 }
2f6d557f 6922 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
badaa04c 6923}
6924
53800dbe 6925\f
0e80b01d 6926/* Emit code to restore the current value of stack. */
6927
6928static void
6929expand_stack_restore (tree var)
6930{
1e0c0b35 6931 rtx_insn *prev;
6932 rtx sa = expand_normal (var);
0e80b01d 6933
6934 sa = convert_memory_address (Pmode, sa);
6935
6936 prev = get_last_insn ();
6937 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6938
6939 record_new_stack_level ();
6940
0e80b01d 6941 fixup_args_size_notes (prev, get_last_insn (), 0);
6942}
6943
/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  /* emit_stack_save fills in RET with the saved stack pointer rtx.  */
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
6954
a7babc1e 6955/* Emit code to get the openacc gang, worker or vector id or size. */
6956
6957static rtx
6958expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6959{
6960 const char *name;
6961 rtx fallback_retval;
6962 rtx_insn *(*gen_fn) (rtx, rtx);
6963 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6964 {
6965 case BUILT_IN_GOACC_PARLEVEL_ID:
6966 name = "__builtin_goacc_parlevel_id";
6967 fallback_retval = const0_rtx;
6968 gen_fn = targetm.gen_oacc_dim_pos;
6969 break;
6970 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6971 name = "__builtin_goacc_parlevel_size";
6972 fallback_retval = const1_rtx;
6973 gen_fn = targetm.gen_oacc_dim_size;
6974 break;
6975 default:
6976 gcc_unreachable ();
6977 }
6978
6979 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6980 {
6981 error ("%qs only supported in OpenACC code", name);
6982 return const0_rtx;
6983 }
6984
6985 tree arg = CALL_EXPR_ARG (exp, 0);
6986 if (TREE_CODE (arg) != INTEGER_CST)
6987 {
6988 error ("non-constant argument 0 to %qs", name);
6989 return const0_rtx;
6990 }
6991
6992 int dim = TREE_INT_CST_LOW (arg);
6993 switch (dim)
6994 {
6995 case GOMP_DIM_GANG:
6996 case GOMP_DIM_WORKER:
6997 case GOMP_DIM_VECTOR:
6998 break;
6999 default:
7000 error ("illegal argument 0 to %qs", name);
7001 return const0_rtx;
7002 }
7003
7004 if (ignore)
7005 return target;
7006
2b895374 7007 if (target == NULL_RTX)
7008 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7009
a7babc1e 7010 if (!targetm.have_oacc_dim_size ())
7011 {
7012 emit_move_insn (target, fallback_retval);
7013 return target;
7014 }
7015
7016 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7017 emit_insn (gen_fn (reg, GEN_INT (dim)));
7018 if (reg != target)
7019 emit_move_insn (target, reg);
7020
7021 return target;
7022}
ca4c3545 7023
b3e6ae76 7024/* Expand a string compare operation using a sequence of char comparison
a950155e 7025 to get rid of the calling overhead, with result going to TARGET if
7026 that's convenient.
7027
7028 VAR_STR is the variable string source;
7029 CONST_STR is the constant string source;
7030 LENGTH is the number of chars to compare;
7031 CONST_STR_N indicates which source string is the constant string;
7032 IS_MEMCMP indicates whether it's a memcmp or strcmp.
b3e6ae76 7033
a950155e 7034 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7035
0dbefa15 7036 target = (int) (unsigned char) var_str[0]
7037 - (int) (unsigned char) const_str[0];
a950155e 7038 if (target != 0)
7039 goto ne_label;
7040 ...
0dbefa15 7041 target = (int) (unsigned char) var_str[length - 2]
7042 - (int) (unsigned char) const_str[length - 2];
a950155e 7043 if (target != 0)
7044 goto ne_label;
0dbefa15 7045 target = (int) (unsigned char) var_str[length - 1]
7046 - (int) (unsigned char) const_str[length - 1];
a950155e 7047 ne_label:
7048 */
7049
7050static rtx
b3e6ae76 7051inline_string_cmp (rtx target, tree var_str, const char *const_str,
a950155e 7052 unsigned HOST_WIDE_INT length,
0dbefa15 7053 int const_str_n, machine_mode mode)
a950155e 7054{
7055 HOST_WIDE_INT offset = 0;
b3e6ae76 7056 rtx var_rtx_array
a950155e 7057 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7058 rtx var_rtx = NULL_RTX;
b3e6ae76 7059 rtx const_rtx = NULL_RTX;
7060 rtx result = target ? target : gen_reg_rtx (mode);
7061 rtx_code_label *ne_label = gen_label_rtx ();
0dbefa15 7062 tree unit_type_node = unsigned_char_type_node;
b3e6ae76 7063 scalar_int_mode unit_mode
7064 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
a950155e 7065
7066 start_sequence ();
7067
7068 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7069 {
b3e6ae76 7070 var_rtx
a950155e 7071 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
b3e6ae76 7072 const_rtx = c_readstr (const_str + offset, unit_mode);
a950155e 7073 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7074 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
b3e6ae76 7075
0dbefa15 7076 op0 = convert_modes (mode, unit_mode, op0, 1);
7077 op1 = convert_modes (mode, unit_mode, op1, 1);
b3e6ae76 7078 result = expand_simple_binop (mode, MINUS, op0, op1,
0dbefa15 7079 result, 1, OPTAB_WIDEN);
b3e6ae76 7080 if (i < length - 1)
7081 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7082 mode, true, ne_label);
7083 offset += GET_MODE_SIZE (unit_mode);
a950155e 7084 }
7085
7086 emit_label (ne_label);
7087 rtx_insn *insns = get_insns ();
7088 end_sequence ();
7089 emit_insn (insns);
7090
7091 return result;
7092}
7093
b3e6ae76 7094/* Inline expansion a call to str(n)cmp, with result going to
a950155e 7095 TARGET if that's convenient.
7096 If the call is not been inlined, return NULL_RTX. */
7097static rtx
0dbefa15 7098inline_expand_builtin_string_cmp (tree exp, rtx target)
a950155e 7099{
7100 tree fndecl = get_callee_fndecl (exp);
7101 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7102 unsigned HOST_WIDE_INT length = 0;
7103 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7104
9c7661c8 7105 /* Do NOT apply this inlining expansion when optimizing for size or
7106 optimization level below 2. */
7107 if (optimize < 2 || optimize_insn_for_size_p ())
7108 return NULL_RTX;
7109
a950155e 7110 gcc_checking_assert (fcode == BUILT_IN_STRCMP
b3e6ae76 7111 || fcode == BUILT_IN_STRNCMP
a950155e 7112 || fcode == BUILT_IN_MEMCMP);
7113
0dbefa15 7114 /* On a target where the type of the call (int) has same or narrower presicion
7115 than unsigned char, give up the inlining expansion. */
7116 if (TYPE_PRECISION (unsigned_char_type_node)
7117 >= TYPE_PRECISION (TREE_TYPE (exp)))
7118 return NULL_RTX;
7119
a950155e 7120 tree arg1 = CALL_EXPR_ARG (exp, 0);
7121 tree arg2 = CALL_EXPR_ARG (exp, 1);
7122 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7123
7124 unsigned HOST_WIDE_INT len1 = 0;
7125 unsigned HOST_WIDE_INT len2 = 0;
7126 unsigned HOST_WIDE_INT len3 = 0;
7127
7128 const char *src_str1 = c_getstr (arg1, &len1);
7129 const char *src_str2 = c_getstr (arg2, &len2);
b3e6ae76 7130
a950155e 7131 /* If neither strings is constant string, the call is not qualify. */
7132 if (!src_str1 && !src_str2)
7133 return NULL_RTX;
7134
7135 /* For strncmp, if the length is not a const, not qualify. */
41f96489 7136 if (is_ncmp)
7137 {
7138 if (!tree_fits_uhwi_p (len3_tree))
7139 return NULL_RTX;
7140 else
7141 len3 = tree_to_uhwi (len3_tree);
7142 }
7143
7144 if (src_str1 != NULL)
7145 len1 = strnlen (src_str1, len1) + 1;
7146
7147 if (src_str2 != NULL)
7148 len2 = strnlen (src_str2, len2) + 1;
a950155e 7149
7150 int const_str_n = 0;
7151 if (!len1)
7152 const_str_n = 2;
7153 else if (!len2)
7154 const_str_n = 1;
7155 else if (len2 > len1)
7156 const_str_n = 1;
7157 else
7158 const_str_n = 2;
7159
7160 gcc_checking_assert (const_str_n > 0);
7161 length = (const_str_n == 1) ? len1 : len2;
7162
41f96489 7163 if (is_ncmp && len3 < length)
a950155e 7164 length = len3;
7165
b3e6ae76 7166 /* If the length of the comparision is larger than the threshold,
a950155e 7167 do nothing. */
b3e6ae76 7168 if (length > (unsigned HOST_WIDE_INT)
a950155e 7169 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7170 return NULL_RTX;
7171
7172 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7173
7174 /* Now, start inline expansion the call. */
b3e6ae76 7175 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
a950155e 7176 (const_str_n == 1) ? src_str1 : src_str2, length,
0dbefa15 7177 const_str_n, mode);
a950155e 7178}
7179
123081ef 7180/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7181 represents the size of the first argument to that call, or VOIDmode
7182 if the argument is a pointer. IGNORE will be true if the result
7183 isn't used. */
7184static rtx
7185expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7186 bool ignore)
7187{
7188 rtx val, failsafe;
7189 unsigned nargs = call_expr_nargs (exp);
7190
7191 tree arg0 = CALL_EXPR_ARG (exp, 0);
7192
7193 if (mode == VOIDmode)
7194 {
7195 mode = TYPE_MODE (TREE_TYPE (arg0));
7196 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7197 }
7198
7199 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7200
7201 /* An optional second argument can be used as a failsafe value on
7202 some machines. If it isn't present, then the failsafe value is
7203 assumed to be 0. */
7204 if (nargs > 1)
7205 {
7206 tree arg1 = CALL_EXPR_ARG (exp, 1);
7207 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7208 }
7209 else
7210 failsafe = const0_rtx;
7211
7212 /* If the result isn't used, the behavior is undefined. It would be
7213 nice to emit a warning here, but path splitting means this might
7214 happen with legitimate code. So simply drop the builtin
7215 expansion in that case; we've handled any side-effects above. */
7216 if (ignore)
7217 return const0_rtx;
7218
7219 /* If we don't have a suitable target, create one to hold the result. */
7220 if (target == NULL || GET_MODE (target) != mode)
7221 target = gen_reg_rtx (mode);
7222
7223 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7224 val = convert_modes (mode, VOIDmode, val, false);
7225
7226 return targetm.speculation_safe_value (mode, target, val, failsafe);
7227}
7228
53800dbe 7229/* Expand an expression EXP that calls a built-in function,
7230 with result going to TARGET if that's convenient
7231 (and in mode MODE if that's convenient).
7232 SUBTARGET may be used as the target for computing one of EXP's operands.
7233 IGNORE is nonzero if the value is to be ignored. */
7234
7235rtx
3754d046 7236expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 7237 int ignore)
53800dbe 7238{
c6e6ecb1 7239 tree fndecl = get_callee_fndecl (exp);
3754d046 7240 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 7241 int flags;
53800dbe 7242
4e2f4ed5 7243 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7244 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7245
f9acf11a 7246 /* When ASan is enabled, we don't want to expand some memory/string
7247 builtins and rely on libsanitizer's hooks. This allows us to avoid
7248 redundant checks and be sure, that possible overflow will be detected
7249 by ASan. */
7250
d1170f8d 7251 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
f9acf11a 7252 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7253 return expand_call (exp, target, ignore);
7254
53800dbe 7255 /* When not optimizing, generate calls to library functions for a certain
7256 set of builtins. */
cd9ff771 7257 if (!optimize
b6a5fc45 7258 && !called_as_built_in (fndecl)
73037a1e 7259 && fcode != BUILT_IN_FORK
7260 && fcode != BUILT_IN_EXECL
7261 && fcode != BUILT_IN_EXECV
7262 && fcode != BUILT_IN_EXECLP
7263 && fcode != BUILT_IN_EXECLE
7264 && fcode != BUILT_IN_EXECVP
7265 && fcode != BUILT_IN_EXECVE
2b34677f 7266 && !ALLOCA_FUNCTION_CODE_P (fcode)
1e42d5c6 7267 && fcode != BUILT_IN_FREE)
cd9ff771 7268 return expand_call (exp, target, ignore);
53800dbe 7269
8d6d7930 7270 /* The built-in function expanders test for target == const0_rtx
7271 to determine whether the function's result will be ignored. */
7272 if (ignore)
7273 target = const0_rtx;
7274
7275 /* If the result of a pure or const built-in function is ignored, and
7276 none of its arguments are volatile, we can avoid expanding the
7277 built-in call and just evaluate the arguments for side-effects. */
7278 if (target == const0_rtx
67fa4078 7279 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7280 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 7281 {
7282 bool volatilep = false;
7283 tree arg;
c2f47e15 7284 call_expr_arg_iterator iter;
8d6d7930 7285
c2f47e15 7286 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7287 if (TREE_THIS_VOLATILE (arg))
8d6d7930 7288 {
7289 volatilep = true;
7290 break;
7291 }
7292
7293 if (! volatilep)
7294 {
c2f47e15 7295 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7296 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 7297 return const0_rtx;
7298 }
7299 }
7300
53800dbe 7301 switch (fcode)
7302 {
4f35b1fc 7303 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 7304 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 7305 case BUILT_IN_FABSD32:
7306 case BUILT_IN_FABSD64:
7307 case BUILT_IN_FABSD128:
c2f47e15 7308 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 7309 if (target)
a0c938f0 7310 return target;
78a74442 7311 break;
7312
4f35b1fc 7313 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 7314 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 7315 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 7316 if (target)
7317 return target;
7318 break;
7319
7d3f6cc7 7320 /* Just do a normal library call if we were unable to fold
7321 the values. */
4f35b1fc 7322 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 7323 break;
53800dbe 7324
7e0713b1 7325 CASE_FLT_FN (BUILT_IN_FMA):
8c32188e 7326 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7e0713b1 7327 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7328 if (target)
7329 return target;
7330 break;
7331
a67a90e5 7332 CASE_FLT_FN (BUILT_IN_ILOGB):
7333 if (! flag_unsafe_math_optimizations)
7334 break;
12f08300 7335 gcc_fallthrough ();
7336 CASE_FLT_FN (BUILT_IN_ISINF):
7337 CASE_FLT_FN (BUILT_IN_FINITE):
7338 case BUILT_IN_ISFINITE:
7339 case BUILT_IN_ISNORMAL:
f97eea22 7340 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 7341 if (target)
7342 return target;
7343 break;
7344
80ff6494 7345 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 7346 CASE_FLT_FN (BUILT_IN_LCEIL):
7347 CASE_FLT_FN (BUILT_IN_LLCEIL):
7348 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 7349 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 7350 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 7351 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 7352 if (target)
7353 return target;
7354 break;
7355
80ff6494 7356 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 7357 CASE_FLT_FN (BUILT_IN_LRINT):
7358 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 7359 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 7360 CASE_FLT_FN (BUILT_IN_LROUND):
7361 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 7362 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 7363 if (target)
7364 return target;
7365 break;
7366
4f35b1fc 7367 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 7368 target = expand_builtin_powi (exp, target);
757c219d 7369 if (target)
7370 return target;
7371 break;
7372
d735c391 7373 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 7374 target = expand_builtin_cexpi (exp, target);
d735c391 7375 gcc_assert (target);
7376 return target;
7377
4f35b1fc 7378 CASE_FLT_FN (BUILT_IN_SIN):
7379 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 7380 if (! flag_unsafe_math_optimizations)
7381 break;
7382 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7383 if (target)
7384 return target;
7385 break;
7386
c3147c1a 7387 CASE_FLT_FN (BUILT_IN_SINCOS):
7388 if (! flag_unsafe_math_optimizations)
7389 break;
7390 target = expand_builtin_sincos (exp);
7391 if (target)
7392 return target;
7393 break;
7394
53800dbe 7395 case BUILT_IN_APPLY_ARGS:
7396 return expand_builtin_apply_args ();
7397
7398 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7399 FUNCTION with a copy of the parameters described by
7400 ARGUMENTS, and ARGSIZE. It returns a block of memory
7401 allocated on the stack into which is stored all the registers
7402 that might possibly be used for returning the result of a
7403 function. ARGUMENTS is the value returned by
7404 __builtin_apply_args. ARGSIZE is the number of bytes of
7405 arguments that must be copied. ??? How should this value be
7406 computed? We'll also need a safe worst case value for varargs
7407 functions. */
7408 case BUILT_IN_APPLY:
c2f47e15 7409 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 7410 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 7411 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 7412 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7413 return const0_rtx;
7414 else
7415 {
53800dbe 7416 rtx ops[3];
7417
c2f47e15 7418 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7419 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7420 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 7421
7422 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7423 }
7424
7425 /* __builtin_return (RESULT) causes the function to return the
7426 value described by RESULT. RESULT is address of the block of
7427 memory returned by __builtin_apply. */
7428 case BUILT_IN_RETURN:
c2f47e15 7429 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7430 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 7431 return const0_rtx;
7432
7433 case BUILT_IN_SAVEREGS:
a66c9326 7434 return expand_builtin_saveregs ();
53800dbe 7435
48dc2227 7436 case BUILT_IN_VA_ARG_PACK:
7437 /* All valid uses of __builtin_va_arg_pack () are removed during
7438 inlining. */
b8c23db3 7439 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 7440 return const0_rtx;
7441
4e1d7ea4 7442 case BUILT_IN_VA_ARG_PACK_LEN:
7443 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7444 inlining. */
b8c23db3 7445 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 7446 return const0_rtx;
7447
53800dbe 7448 /* Return the address of the first anonymous stack arg. */
7449 case BUILT_IN_NEXT_ARG:
c2f47e15 7450 if (fold_builtin_next_arg (exp, false))
a0c938f0 7451 return const0_rtx;
79012a9d 7452 return expand_builtin_next_arg ();
53800dbe 7453
ac8fb6db 7454 case BUILT_IN_CLEAR_CACHE:
7455 target = expand_builtin___clear_cache (exp);
7456 if (target)
7457 return target;
7458 break;
7459
53800dbe 7460 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 7461 return expand_builtin_classify_type (exp);
53800dbe 7462
7463 case BUILT_IN_CONSTANT_P:
4ee9c684 7464 return const0_rtx;
53800dbe 7465
7466 case BUILT_IN_FRAME_ADDRESS:
7467 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 7468 return expand_builtin_frame_address (fndecl, exp);
53800dbe 7469
7470 /* Returns the address of the area where the structure is returned.
7471 0 otherwise. */
7472 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 7473 if (call_expr_nargs (exp) != 0
9342ee68 7474 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 7475 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 7476 return const0_rtx;
53800dbe 7477 else
9342ee68 7478 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 7479
2b34677f 7480 CASE_BUILT_IN_ALLOCA:
2b29cc6a 7481 target = expand_builtin_alloca (exp);
53800dbe 7482 if (target)
7483 return target;
7484 break;
7485
d08919a7 7486 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7487 return expand_asan_emit_allocas_unpoison (exp);
7488
4ee9c684 7489 case BUILT_IN_STACK_SAVE:
7490 return expand_stack_save ();
7491
7492 case BUILT_IN_STACK_RESTORE:
c2f47e15 7493 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 7494 return const0_rtx;
7495
74bdbe96 7496 case BUILT_IN_BSWAP16:
42791117 7497 case BUILT_IN_BSWAP32:
7498 case BUILT_IN_BSWAP64:
74bdbe96 7499 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 7500 if (target)
7501 return target;
7502 break;
7503
4f35b1fc 7504 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 7505 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7506 subtarget, ffs_optab);
6a08d0ab 7507 if (target)
7508 return target;
7509 break;
7510
4f35b1fc 7511 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 7512 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7513 subtarget, clz_optab);
6a08d0ab 7514 if (target)
7515 return target;
7516 break;
7517
4f35b1fc 7518 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 7519 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7520 subtarget, ctz_optab);
6a08d0ab 7521 if (target)
7522 return target;
7523 break;
7524
d8492bd3 7525 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 7526 target = expand_builtin_unop (target_mode, exp, target,
7527 subtarget, clrsb_optab);
7528 if (target)
7529 return target;
7530 break;
7531
4f35b1fc 7532 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 7533 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7534 subtarget, popcount_optab);
6a08d0ab 7535 if (target)
7536 return target;
7537 break;
7538
4f35b1fc 7539 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 7540 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7541 subtarget, parity_optab);
53800dbe 7542 if (target)
7543 return target;
7544 break;
7545
7546 case BUILT_IN_STRLEN:
c2f47e15 7547 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 7548 if (target)
7549 return target;
7550 break;
7551
864bd5de 7552 case BUILT_IN_STRNLEN:
7553 target = expand_builtin_strnlen (exp, target, target_mode);
7554 if (target)
7555 return target;
7556 break;
7557
5aef8938 7558 case BUILT_IN_STRCAT:
7559 target = expand_builtin_strcat (exp, target);
7560 if (target)
7561 return target;
7562 break;
7563
53800dbe 7564 case BUILT_IN_STRCPY:
a65c4d64 7565 target = expand_builtin_strcpy (exp, target);
53800dbe 7566 if (target)
7567 return target;
7568 break;
bf8e3599 7569
5aef8938 7570 case BUILT_IN_STRNCAT:
7571 target = expand_builtin_strncat (exp, target);
7572 if (target)
7573 return target;
7574 break;
7575
ed09096d 7576 case BUILT_IN_STRNCPY:
a65c4d64 7577 target = expand_builtin_strncpy (exp, target);
ed09096d 7578 if (target)
7579 return target;
7580 break;
bf8e3599 7581
3b824fa6 7582 case BUILT_IN_STPCPY:
dc369150 7583 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 7584 if (target)
7585 return target;
7586 break;
7587
4d317237 7588 case BUILT_IN_STPNCPY:
7589 target = expand_builtin_stpncpy (exp, target);
7590 if (target)
7591 return target;
7592 break;
7593
8d6c6ef5 7594 case BUILT_IN_MEMCHR:
7595 target = expand_builtin_memchr (exp, target);
7596 if (target)
7597 return target;
7598 break;
7599
53800dbe 7600 case BUILT_IN_MEMCPY:
a65c4d64 7601 target = expand_builtin_memcpy (exp, target);
3b824fa6 7602 if (target)
7603 return target;
7604 break;
7605
4d317237 7606 case BUILT_IN_MEMMOVE:
7607 target = expand_builtin_memmove (exp, target);
7608 if (target)
7609 return target;
7610 break;
7611
3b824fa6 7612 case BUILT_IN_MEMPCPY:
d0fbba1a 7613 target = expand_builtin_mempcpy (exp, target);
53800dbe 7614 if (target)
7615 return target;
7616 break;
7617
7618 case BUILT_IN_MEMSET:
c2f47e15 7619 target = expand_builtin_memset (exp, target, mode);
53800dbe 7620 if (target)
7621 return target;
7622 break;
7623
ffc83088 7624 case BUILT_IN_BZERO:
0b25db21 7625 target = expand_builtin_bzero (exp);
ffc83088 7626 if (target)
7627 return target;
7628 break;
7629
b3e6ae76 7630 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
72dbc21d 7631 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7632 when changing it to a strcmp call. */
7633 case BUILT_IN_STRCMP_EQ:
7634 target = expand_builtin_memcmp (exp, target, true);
7635 if (target)
7636 return target;
7637
7638 /* Change this call back to a BUILT_IN_STRCMP. */
b3e6ae76 7639 TREE_OPERAND (exp, 1)
72dbc21d 7640 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7641
7642 /* Delete the last parameter. */
7643 unsigned int i;
7644 vec<tree, va_gc> *arg_vec;
7645 vec_alloc (arg_vec, 2);
7646 for (i = 0; i < 2; i++)
7647 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7648 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7649 /* FALLTHROUGH */
7650
53800dbe 7651 case BUILT_IN_STRCMP:
a65c4d64 7652 target = expand_builtin_strcmp (exp, target);
53800dbe 7653 if (target)
7654 return target;
7655 break;
7656
72dbc21d 7657 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7658 back to a BUILT_IN_STRNCMP. */
7659 case BUILT_IN_STRNCMP_EQ:
7660 target = expand_builtin_memcmp (exp, target, true);
7661 if (target)
7662 return target;
7663
7664 /* Change it back to a BUILT_IN_STRNCMP. */
b3e6ae76 7665 TREE_OPERAND (exp, 1)
72dbc21d 7666 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7667 /* FALLTHROUGH */
7668
ed09096d 7669 case BUILT_IN_STRNCMP:
7670 target = expand_builtin_strncmp (exp, target, mode);
7671 if (target)
7672 return target;
7673 break;
7674
071f1696 7675 case BUILT_IN_BCMP:
53800dbe 7676 case BUILT_IN_MEMCMP:
3e346f54 7677 case BUILT_IN_MEMCMP_EQ:
7678 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 7679 if (target)
7680 return target;
3e346f54 7681 if (fcode == BUILT_IN_MEMCMP_EQ)
7682 {
7683 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7684 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7685 }
53800dbe 7686 break;
53800dbe 7687
7688 case BUILT_IN_SETJMP:
12f08300 7689 /* This should have been lowered to the builtins below. */
2c8a1497 7690 gcc_unreachable ();
7691
7692 case BUILT_IN_SETJMP_SETUP:
7693 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7694 and the receiver label. */
c2f47e15 7695 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 7696 {
c2f47e15 7697 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 7698 VOIDmode, EXPAND_NORMAL);
c2f47e15 7699 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 7700 rtx_insn *label_r = label_rtx (label);
2c8a1497 7701
7702 /* This is copied from the handling of non-local gotos. */
7703 expand_builtin_setjmp_setup (buf_addr, label_r);
7704 nonlocal_goto_handler_labels
a4de1c23 7705 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 7706 nonlocal_goto_handler_labels);
7707 /* ??? Do not let expand_label treat us as such since we would
7708 not want to be both on the list of non-local labels and on
7709 the list of forced labels. */
7710 FORCED_LABEL (label) = 0;
7711 return const0_rtx;
7712 }
7713 break;
7714
2c8a1497 7715 case BUILT_IN_SETJMP_RECEIVER:
7716 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 7717 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 7718 {
c2f47e15 7719 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 7720 rtx_insn *label_r = label_rtx (label);
2c8a1497 7721
7722 expand_builtin_setjmp_receiver (label_r);
7723 return const0_rtx;
7724 }
6b7f6858 7725 break;
53800dbe 7726
7727 /* __builtin_longjmp is passed a pointer to an array of five words.
7728 It's similar to the C library longjmp function but works with
7729 __builtin_setjmp above. */
7730 case BUILT_IN_LONGJMP:
c2f47e15 7731 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7732 {
c2f47e15 7733 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 7734 VOIDmode, EXPAND_NORMAL);
c2f47e15 7735 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 7736
7737 if (value != const1_rtx)
7738 {
1e5fcbe2 7739 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 7740 return const0_rtx;
7741 }
7742
7743 expand_builtin_longjmp (buf_addr, value);
7744 return const0_rtx;
7745 }
2c8a1497 7746 break;
53800dbe 7747
4ee9c684 7748 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 7749 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 7750 if (target)
7751 return target;
7752 break;
7753
843d08a9 7754 /* This updates the setjmp buffer that is its argument with the value
7755 of the current stack pointer. */
7756 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 7757 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 7758 {
7759 rtx buf_addr
c2f47e15 7760 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 7761
7762 expand_builtin_update_setjmp_buf (buf_addr);
7763 return const0_rtx;
7764 }
7765 break;
7766
53800dbe 7767 case BUILT_IN_TRAP:
a0ef1725 7768 expand_builtin_trap ();
53800dbe 7769 return const0_rtx;
7770
d2b48f0c 7771 case BUILT_IN_UNREACHABLE:
7772 expand_builtin_unreachable ();
7773 return const0_rtx;
7774
4f35b1fc 7775 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 7776 case BUILT_IN_SIGNBITD32:
7777 case BUILT_IN_SIGNBITD64:
7778 case BUILT_IN_SIGNBITD128:
27f261ef 7779 target = expand_builtin_signbit (exp, target);
7780 if (target)
7781 return target;
7782 break;
7783
53800dbe 7784 /* Various hooks for the DWARF 2 __throw routine. */
7785 case BUILT_IN_UNWIND_INIT:
7786 expand_builtin_unwind_init ();
7787 return const0_rtx;
7788 case BUILT_IN_DWARF_CFA:
7789 return virtual_cfa_rtx;
7790#ifdef DWARF2_UNWIND_INFO
f8f023a5 7791 case BUILT_IN_DWARF_SP_COLUMN:
7792 return expand_builtin_dwarf_sp_column ();
695e919b 7793 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 7794 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 7795 return const0_rtx;
53800dbe 7796#endif
7797 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 7798 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7799 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 7800 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7801 case BUILT_IN_EH_RETURN:
c2f47e15 7802 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7803 CALL_EXPR_ARG (exp, 1));
53800dbe 7804 return const0_rtx;
df4b504c 7805 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 7806 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7807 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7808 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7809 case BUILT_IN_EH_POINTER:
7810 return expand_builtin_eh_pointer (exp);
7811 case BUILT_IN_EH_FILTER:
7812 return expand_builtin_eh_filter (exp);
7813 case BUILT_IN_EH_COPY_VALUES:
7814 return expand_builtin_eh_copy_values (exp);
26093bf4 7815
7ccc713a 7816 case BUILT_IN_VA_START:
c2f47e15 7817 return expand_builtin_va_start (exp);
a66c9326 7818 case BUILT_IN_VA_END:
c2f47e15 7819 return expand_builtin_va_end (exp);
a66c9326 7820 case BUILT_IN_VA_COPY:
c2f47e15 7821 return expand_builtin_va_copy (exp);
89cfe6e5 7822 case BUILT_IN_EXPECT:
c2f47e15 7823 return expand_builtin_expect (exp, target);
01107f42 7824 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7825 return expand_builtin_expect_with_probability (exp, target);
fca0886c 7826 case BUILT_IN_ASSUME_ALIGNED:
7827 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7828 case BUILT_IN_PREFETCH:
c2f47e15 7829 expand_builtin_prefetch (exp);
5e3608d8 7830 return const0_rtx;
7831
4ee9c684 7832 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7833 return expand_builtin_init_trampoline (exp, true);
7834 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7835 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7836 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7837 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7838
a27e3913 7839 case BUILT_IN_INIT_DESCRIPTOR:
7840 return expand_builtin_init_descriptor (exp);
7841 case BUILT_IN_ADJUST_DESCRIPTOR:
7842 return expand_builtin_adjust_descriptor (exp);
7843
73673831 7844 case BUILT_IN_FORK:
7845 case BUILT_IN_EXECL:
7846 case BUILT_IN_EXECV:
7847 case BUILT_IN_EXECLP:
7848 case BUILT_IN_EXECLE:
7849 case BUILT_IN_EXECVP:
7850 case BUILT_IN_EXECVE:
c2f47e15 7851 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7852 if (target)
7853 return target;
7854 break;
53800dbe 7855
2797f13a 7856 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7857 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7858 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7859 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7860 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7861 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7862 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7863 if (target)
7864 return target;
7865 break;
7866
2797f13a 7867 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7868 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7869 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7870 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7871 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7872 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7873 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7874 if (target)
7875 return target;
7876 break;
7877
2797f13a 7878 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7879 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7880 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7881 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7882 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7883 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7884 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7885 if (target)
7886 return target;
7887 break;
7888
2797f13a 7889 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7890 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7891 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7892 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7893 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7894 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7895 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7896 if (target)
7897 return target;
7898 break;
7899
2797f13a 7900 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7901 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7902 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7903 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7904 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7905 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7906 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7907 if (target)
7908 return target;
7909 break;
7910
2797f13a 7911 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7912 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7913 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7914 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7915 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7916 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7917 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7918 if (target)
7919 return target;
7920 break;
7921
2797f13a 7922 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7923 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7924 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7925 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7926 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7927 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7928 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7929 if (target)
7930 return target;
7931 break;
7932
2797f13a 7933 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7934 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7935 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7936 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7937 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7938 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7939 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7940 if (target)
7941 return target;
7942 break;
7943
2797f13a 7944 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7945 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7946 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7947 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7948 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7949 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7950 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7951 if (target)
7952 return target;
7953 break;
7954
2797f13a 7955 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7956 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7957 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7958 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7959 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7960 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7961 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7962 if (target)
7963 return target;
7964 break;
7965
2797f13a 7966 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7967 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7968 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7969 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7970 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7971 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7972 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7973 if (target)
7974 return target;
7975 break;
7976
2797f13a 7977 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7978 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7979 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7980 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7981 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7982 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7983 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7984 if (target)
7985 return target;
7986 break;
7987
2797f13a 7988 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7989 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7990 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7991 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7992 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7993 if (mode == VOIDmode)
7994 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7995 if (!target || !register_operand (target, mode))
7996 target = gen_reg_rtx (mode);
3e272de8 7997
2797f13a 7998 mode = get_builtin_sync_mode
7999 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 8000 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 8001 if (target)
8002 return target;
8003 break;
8004
2797f13a 8005 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8006 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8007 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8008 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8009 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8010 mode = get_builtin_sync_mode
8011 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 8012 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 8013 if (target)
8014 return target;
8015 break;
8016
2797f13a 8017 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8018 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8019 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8020 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8021 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8022 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8023 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 8024 if (target)
8025 return target;
8026 break;
8027
2797f13a 8028 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8029 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8030 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8031 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8032 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8033 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8034 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 8035 return const0_rtx;
8036
2797f13a 8037 case BUILT_IN_SYNC_SYNCHRONIZE:
8038 expand_builtin_sync_synchronize ();
b6a5fc45 8039 return const0_rtx;
8040
1cd6e20d 8041 case BUILT_IN_ATOMIC_EXCHANGE_1:
8042 case BUILT_IN_ATOMIC_EXCHANGE_2:
8043 case BUILT_IN_ATOMIC_EXCHANGE_4:
8044 case BUILT_IN_ATOMIC_EXCHANGE_8:
8045 case BUILT_IN_ATOMIC_EXCHANGE_16:
8046 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8047 target = expand_builtin_atomic_exchange (mode, exp, target);
8048 if (target)
8049 return target;
8050 break;
8051
8052 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8053 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8054 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8055 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8056 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 8057 {
8058 unsigned int nargs, z;
f1f41a6c 8059 vec<tree, va_gc> *vec;
2c201ad1 8060
8061 mode =
8062 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8063 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8064 if (target)
8065 return target;
8066
8067 /* If this is turned into an external library call, the weak parameter
8068 must be dropped to match the expected parameter list. */
8069 nargs = call_expr_nargs (exp);
f1f41a6c 8070 vec_alloc (vec, nargs - 1);
2c201ad1 8071 for (z = 0; z < 3; z++)
f1f41a6c 8072 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8073 /* Skip the boolean weak parameter. */
8074 for (z = 4; z < 6; z++)
f1f41a6c 8075 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8076 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8077 break;
8078 }
1cd6e20d 8079
8080 case BUILT_IN_ATOMIC_LOAD_1:
8081 case BUILT_IN_ATOMIC_LOAD_2:
8082 case BUILT_IN_ATOMIC_LOAD_4:
8083 case BUILT_IN_ATOMIC_LOAD_8:
8084 case BUILT_IN_ATOMIC_LOAD_16:
8085 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8086 target = expand_builtin_atomic_load (mode, exp, target);
8087 if (target)
8088 return target;
8089 break;
8090
8091 case BUILT_IN_ATOMIC_STORE_1:
8092 case BUILT_IN_ATOMIC_STORE_2:
8093 case BUILT_IN_ATOMIC_STORE_4:
8094 case BUILT_IN_ATOMIC_STORE_8:
8095 case BUILT_IN_ATOMIC_STORE_16:
8096 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8097 target = expand_builtin_atomic_store (mode, exp);
8098 if (target)
8099 return const0_rtx;
8100 break;
8101
8102 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8103 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8104 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8105 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8106 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8107 {
8108 enum built_in_function lib;
8109 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8110 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8111 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8112 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8113 ignore, lib);
8114 if (target)
8115 return target;
8116 break;
8117 }
8118 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8119 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8120 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8121 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8122 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8123 {
8124 enum built_in_function lib;
8125 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8126 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8127 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8128 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8129 ignore, lib);
8130 if (target)
8131 return target;
8132 break;
8133 }
8134 case BUILT_IN_ATOMIC_AND_FETCH_1:
8135 case BUILT_IN_ATOMIC_AND_FETCH_2:
8136 case BUILT_IN_ATOMIC_AND_FETCH_4:
8137 case BUILT_IN_ATOMIC_AND_FETCH_8:
8138 case BUILT_IN_ATOMIC_AND_FETCH_16:
8139 {
8140 enum built_in_function lib;
8141 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8142 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8143 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8144 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8145 ignore, lib);
8146 if (target)
8147 return target;
8148 break;
8149 }
8150 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8151 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8152 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8153 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8154 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8155 {
8156 enum built_in_function lib;
8157 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8158 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8159 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8160 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8161 ignore, lib);
8162 if (target)
8163 return target;
8164 break;
8165 }
8166 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8167 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8168 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8169 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8170 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8171 {
8172 enum built_in_function lib;
8173 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8174 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8175 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8176 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8177 ignore, lib);
8178 if (target)
8179 return target;
8180 break;
8181 }
8182 case BUILT_IN_ATOMIC_OR_FETCH_1:
8183 case BUILT_IN_ATOMIC_OR_FETCH_2:
8184 case BUILT_IN_ATOMIC_OR_FETCH_4:
8185 case BUILT_IN_ATOMIC_OR_FETCH_8:
8186 case BUILT_IN_ATOMIC_OR_FETCH_16:
8187 {
8188 enum built_in_function lib;
8189 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8190 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8191 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8192 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8193 ignore, lib);
8194 if (target)
8195 return target;
8196 break;
8197 }
8198 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8199 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8200 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8201 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8202 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8203 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8204 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8205 ignore, BUILT_IN_NONE);
8206 if (target)
8207 return target;
8208 break;
8209
8210 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8211 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8212 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8213 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8214 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8215 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8216 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8217 ignore, BUILT_IN_NONE);
8218 if (target)
8219 return target;
8220 break;
8221
8222 case BUILT_IN_ATOMIC_FETCH_AND_1:
8223 case BUILT_IN_ATOMIC_FETCH_AND_2:
8224 case BUILT_IN_ATOMIC_FETCH_AND_4:
8225 case BUILT_IN_ATOMIC_FETCH_AND_8:
8226 case BUILT_IN_ATOMIC_FETCH_AND_16:
8227 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8228 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8229 ignore, BUILT_IN_NONE);
8230 if (target)
8231 return target;
8232 break;
8233
8234 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8235 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8236 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8237 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8238 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8239 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8240 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8241 ignore, BUILT_IN_NONE);
8242 if (target)
8243 return target;
8244 break;
8245
8246 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8247 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8248 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8249 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8250 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8251 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8252 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8253 ignore, BUILT_IN_NONE);
8254 if (target)
8255 return target;
8256 break;
8257
8258 case BUILT_IN_ATOMIC_FETCH_OR_1:
8259 case BUILT_IN_ATOMIC_FETCH_OR_2:
8260 case BUILT_IN_ATOMIC_FETCH_OR_4:
8261 case BUILT_IN_ATOMIC_FETCH_OR_8:
8262 case BUILT_IN_ATOMIC_FETCH_OR_16:
8263 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8264 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8265 ignore, BUILT_IN_NONE);
8266 if (target)
8267 return target;
8268 break;
10b744a3 8269
8270 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 8271 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 8272
8273 case BUILT_IN_ATOMIC_CLEAR:
8274 return expand_builtin_atomic_clear (exp);
1cd6e20d 8275
8276 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8277 return expand_builtin_atomic_always_lock_free (exp);
8278
8279 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8280 target = expand_builtin_atomic_is_lock_free (exp);
8281 if (target)
8282 return target;
8283 break;
8284
8285 case BUILT_IN_ATOMIC_THREAD_FENCE:
8286 expand_builtin_atomic_thread_fence (exp);
8287 return const0_rtx;
8288
8289 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8290 expand_builtin_atomic_signal_fence (exp);
8291 return const0_rtx;
8292
0a39fd54 8293 case BUILT_IN_OBJECT_SIZE:
8294 return expand_builtin_object_size (exp);
8295
8296 case BUILT_IN_MEMCPY_CHK:
8297 case BUILT_IN_MEMPCPY_CHK:
8298 case BUILT_IN_MEMMOVE_CHK:
8299 case BUILT_IN_MEMSET_CHK:
8300 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8301 if (target)
8302 return target;
8303 break;
8304
8305 case BUILT_IN_STRCPY_CHK:
8306 case BUILT_IN_STPCPY_CHK:
8307 case BUILT_IN_STRNCPY_CHK:
1063acde 8308 case BUILT_IN_STPNCPY_CHK:
0a39fd54 8309 case BUILT_IN_STRCAT_CHK:
b356dfef 8310 case BUILT_IN_STRNCAT_CHK:
0a39fd54 8311 case BUILT_IN_SNPRINTF_CHK:
8312 case BUILT_IN_VSNPRINTF_CHK:
8313 maybe_emit_chk_warning (exp, fcode);
8314 break;
8315
8316 case BUILT_IN_SPRINTF_CHK:
8317 case BUILT_IN_VSPRINTF_CHK:
8318 maybe_emit_sprintf_chk_warning (exp, fcode);
8319 break;
8320
2c281b15 8321 case BUILT_IN_FREE:
f74ea1c2 8322 if (warn_free_nonheap_object)
8323 maybe_emit_free_warning (exp);
2c281b15 8324 break;
8325
badaa04c 8326 case BUILT_IN_THREAD_POINTER:
8327 return expand_builtin_thread_pointer (exp, target);
8328
8329 case BUILT_IN_SET_THREAD_POINTER:
8330 expand_builtin_set_thread_pointer (exp);
8331 return const0_rtx;
8332
ca4c3545 8333 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 8334 /* Do library call, if we failed to expand the builtin when
8335 folding. */
ca4c3545 8336 break;
8337
a7babc1e 8338 case BUILT_IN_GOACC_PARLEVEL_ID:
8339 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8340 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8341
123081ef 8342 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8343 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8344
8345 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8346 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8347 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8348 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8349 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8350 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8351 return expand_speculation_safe_value (mode, exp, target, ignore);
8352
92482ee0 8353 default: /* just do library call, if unknown builtin */
146c1b4f 8354 break;
53800dbe 8355 }
8356
8357 /* The switch statement above can drop through to cause the function
8358 to be called normally. */
8359 return expand_call (exp, target, ignore);
8360}
650e4c94 8361
805e22b2 8362/* Determine whether a tree node represents a call to a built-in
52203a9d 8363 function. If the tree T is a call to a built-in function with
8364 the right number of arguments of the appropriate types, return
8365 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8366 Otherwise the return value is END_BUILTINS. */
aecda0d6 8367
805e22b2 8368enum built_in_function
b7bf20db 8369builtin_mathfn_code (const_tree t)
805e22b2 8370{
b7bf20db 8371 const_tree fndecl, arg, parmlist;
8372 const_tree argtype, parmtype;
8373 const_call_expr_arg_iterator iter;
805e22b2 8374
d44e3710 8375 if (TREE_CODE (t) != CALL_EXPR)
805e22b2 8376 return END_BUILTINS;
8377
c6e6ecb1 8378 fndecl = get_callee_fndecl (t);
a0e9bfbb 8379 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8380 return END_BUILTINS;
805e22b2 8381
52203a9d 8382 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 8383 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 8384 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 8385 {
52203a9d 8386 /* If a function doesn't take a variable number of arguments,
8387 the last element in the list will have type `void'. */
8388 parmtype = TREE_VALUE (parmlist);
8389 if (VOID_TYPE_P (parmtype))
8390 {
b7bf20db 8391 if (more_const_call_expr_args_p (&iter))
52203a9d 8392 return END_BUILTINS;
8393 return DECL_FUNCTION_CODE (fndecl);
8394 }
8395
b7bf20db 8396 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 8397 return END_BUILTINS;
48e1416a 8398
b7bf20db 8399 arg = next_const_call_expr_arg (&iter);
c2f47e15 8400 argtype = TREE_TYPE (arg);
52203a9d 8401
8402 if (SCALAR_FLOAT_TYPE_P (parmtype))
8403 {
8404 if (! SCALAR_FLOAT_TYPE_P (argtype))
8405 return END_BUILTINS;
8406 }
8407 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8408 {
8409 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8410 return END_BUILTINS;
8411 }
8412 else if (POINTER_TYPE_P (parmtype))
8413 {
8414 if (! POINTER_TYPE_P (argtype))
8415 return END_BUILTINS;
8416 }
8417 else if (INTEGRAL_TYPE_P (parmtype))
8418 {
8419 if (! INTEGRAL_TYPE_P (argtype))
8420 return END_BUILTINS;
8421 }
8422 else
e9f80ff5 8423 return END_BUILTINS;
e9f80ff5 8424 }
8425
52203a9d 8426 /* Variable-length argument list. */
805e22b2 8427 return DECL_FUNCTION_CODE (fndecl);
8428}
8429
c2f47e15 8430/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8431 evaluate to a constant. */
650e4c94 8432
8433static tree
c2f47e15 8434fold_builtin_constant_p (tree arg)
650e4c94 8435{
650e4c94 8436 /* We return 1 for a numeric type that's known to be a constant
8437 value at compile-time or for an aggregate type that's a
8438 literal constant. */
c2f47e15 8439 STRIP_NOPS (arg);
650e4c94 8440
8441 /* If we know this is a constant, emit the constant of one. */
c2f47e15 8442 if (CONSTANT_CLASS_P (arg)
8443 || (TREE_CODE (arg) == CONSTRUCTOR
8444 && TREE_CONSTANT (arg)))
650e4c94 8445 return integer_one_node;
c2f47e15 8446 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 8447 {
c2f47e15 8448 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 8449 if (TREE_CODE (op) == STRING_CST
8450 || (TREE_CODE (op) == ARRAY_REF
8451 && integer_zerop (TREE_OPERAND (op, 1))
8452 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8453 return integer_one_node;
8454 }
650e4c94 8455
1fb4300c 8456 /* If this expression has side effects, show we don't know it to be a
8457 constant. Likewise if it's a pointer or aggregate type since in
8458 those case we only want literals, since those are only optimized
f97c71a1 8459 when generating RTL, not later.
8460 And finally, if we are compiling an initializer, not code, we
8461 need to return a definite result now; there's not going to be any
8462 more optimization done. */
c2f47e15 8463 if (TREE_SIDE_EFFECTS (arg)
8464 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8465 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 8466 || cfun == 0
0b049e15 8467 || folding_initializer
8468 || force_folding_builtin_constant_p)
650e4c94 8469 return integer_zero_node;
8470
c2f47e15 8471 return NULL_TREE;
650e4c94 8472}
8473
01107f42 8474/* Create builtin_expect or builtin_expect_with_probability
8475 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8476 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8477 builtin_expect_with_probability instead uses third argument as PROBABILITY
8478 value. */
4ee9c684 8479
8480static tree
c83059be 8481build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
01107f42 8482 tree predictor, tree probability)
4ee9c684 8483{
76f5a783 8484 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 8485
01107f42 8486 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8487 : BUILT_IN_EXPECT_WITH_PROBABILITY);
76f5a783 8488 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8489 ret_type = TREE_TYPE (TREE_TYPE (fn));
8490 pred_type = TREE_VALUE (arg_types);
8491 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8492
389dd41b 8493 pred = fold_convert_loc (loc, pred_type, pred);
8494 expected = fold_convert_loc (loc, expected_type, expected);
01107f42 8495
8496 if (probability)
8497 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8498 else
8499 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8500 predictor);
76f5a783 8501
8502 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8503 build_int_cst (ret_type, 0));
8504}
8505
/* Fold a call to builtin_expect with arguments ARG0 (the tested
   expression), ARG1 (the expected value), ARG2 (the predictor the
   Fortran FE may pass, or NULL_TREE) and ARG3 (the probability for
   __builtin_expect_with_probability, or NULL_TREE for plain
   __builtin_expect).  Return the folded tree, or NULL_TREE if no
   simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested expect (of either flavor): the inner call already
     carries the hint, so return ARG0 unchanged.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used by both recreated predicates below; wrap it in a
	 SAVE_EXPR so it is evaluated only once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may resolve differently at link
	 time, so it is not a usable compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
8578
c2f47e15 8579/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 8580
539a3a92 8581static tree
c2f47e15 8582fold_builtin_classify_type (tree arg)
539a3a92 8583{
c2f47e15 8584 if (arg == 0)
7002a1c8 8585 return build_int_cst (integer_type_node, no_type_class);
539a3a92 8586
7002a1c8 8587 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 8588}
8589
c2f47e15 8590/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 8591
8592static tree
c7cbde74 8593fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 8594{
c2f47e15 8595 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 8596 return NULL_TREE;
8597 else
8598 {
98d5ba5d 8599 c_strlen_data lendata = { };
8600 tree len = c_strlen (arg, 0, &lendata);
e6e27594 8601
8602 if (len)
c7cbde74 8603 return fold_convert_loc (loc, type, len);
e6e27594 8604
98d5ba5d 8605 if (!lendata.decl)
8606 c_strlen (arg, 1, &lendata);
7af57b1c 8607
98d5ba5d 8608 if (lendata.decl)
7af57b1c 8609 {
8610 if (EXPR_HAS_LOCATION (arg))
8611 loc = EXPR_LOCATION (arg);
8612 else if (loc == UNKNOWN_LOCATION)
8613 loc = input_location;
98d5ba5d 8614 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
7af57b1c 8615 }
8616
e6e27594 8617 return NULL_TREE;
8618 }
8619}
8620
92c43e3c 8621/* Fold a call to __builtin_inf or __builtin_huge_val. */
8622
8623static tree
389dd41b 8624fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 8625{
aa870c1b 8626 REAL_VALUE_TYPE real;
8627
40f4dbd5 8628 /* __builtin_inff is intended to be usable to define INFINITY on all
8629 targets. If an infinity is not available, INFINITY expands "to a
8630 positive constant of type float that overflows at translation
8631 time", footnote "In this case, using INFINITY will violate the
8632 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8633 Thus we pedwarn to ensure this constraint violation is
8634 diagnosed. */
92c43e3c 8635 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 8636 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 8637
aa870c1b 8638 real_inf (&real);
8639 return build_real (type, real);
92c43e3c 8640}
8641
/* Fold function call to builtin sincos, sincosf, or sincosl with
   arguments ARG0 (the angle), ARG1 (pointer to receive the sine) and
   ARG2 (pointer to receive the cosine).  The call is canonicalized
   into the matching cexpi function, either folded at compile time or
   emitted as a real call, and its real/imaginary parts are stored
   through ARG2/ARG1.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Constant folding did not produce a result; fall back to a
	 real cexpi call, but only when the C99 complex math library
	 is available and the builtin is implicitly usable.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* The call's value is read twice below; save it so it is only
	 evaluated once.  */
      call = builtin_save_expr (call);
    }

  /* Build (*ARG1 = imagpart (call), *ARG2 = realpart (call)): the
     imaginary part of cexpi is the sine, the real part the cosine.  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
8691
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  The omit_* helpers
     keep ARG1/ARG2 in the result so their side effects are
     preserved.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Both bytes are read as const unsigned char, matching memcmp
	 semantics, then widened to int for the subtraction.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8737
c2f47e15 8738/* Fold a call to builtin isascii with argument ARG. */
d49367d4 8739
8740static tree
389dd41b 8741fold_builtin_isascii (location_t loc, tree arg)
d49367d4 8742{
c2f47e15 8743 if (!validate_arg (arg, INTEGER_TYPE))
8744 return NULL_TREE;
d49367d4 8745 else
8746 {
8747 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8748 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8749 build_int_cst (integer_type_node,
c90b5d40 8750 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8751 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8752 arg, integer_zero_node);
d49367d4 8753 }
8754}
8755
c2f47e15 8756/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8757
8758static tree
389dd41b 8759fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8760{
c2f47e15 8761 if (!validate_arg (arg, INTEGER_TYPE))
8762 return NULL_TREE;
48e1416a 8763
c2f47e15 8764 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8765 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8766 build_int_cst (integer_type_node, 0x7f));
d49367d4 8767}
8768
c2f47e15 8769/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8770
8771static tree
389dd41b 8772fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8773{
c2f47e15 8774 if (!validate_arg (arg, INTEGER_TYPE))
8775 return NULL_TREE;
df1cf42e 8776 else
8777 {
8778 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8779 /* According to the C standard, isdigit is unaffected by locale.
8780 However, it definitely is affected by the target character set. */
624d37a6 8781 unsigned HOST_WIDE_INT target_digit0
8782 = lang_hooks.to_target_charset ('0');
8783
8784 if (target_digit0 == 0)
8785 return NULL_TREE;
8786
389dd41b 8787 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8788 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8789 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8790 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8791 build_int_cst (unsigned_type_node, 9));
df1cf42e 8792 }
8793}
27f261ef 8794
c2f47e15 8795/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8796
8797static tree
389dd41b 8798fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8799{
c2f47e15 8800 if (!validate_arg (arg, REAL_TYPE))
8801 return NULL_TREE;
d1aade50 8802
389dd41b 8803 arg = fold_convert_loc (loc, type, arg);
389dd41b 8804 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8805}
8806
c2f47e15 8807/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8808
8809static tree
389dd41b 8810fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8811{
c2f47e15 8812 if (!validate_arg (arg, INTEGER_TYPE))
8813 return NULL_TREE;
d1aade50 8814
389dd41b 8815 arg = fold_convert_loc (loc, type, arg);
389dd41b 8816 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8817}
8818
abe4dcf6 8819/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8820
8821static tree
389dd41b 8822fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8823{
239d491a 8824 if (validate_arg (arg, COMPLEX_TYPE)
8825 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8826 {
8827 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8828
abe4dcf6 8829 if (atan2_fn)
8830 {
c2f47e15 8831 tree new_arg = builtin_save_expr (arg);
389dd41b 8832 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8833 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8834 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8835 }
8836 }
48e1416a 8837
abe4dcf6 8838 return NULL_TREE;
8839}
8840
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the int* exponent out-parameter, RETTYPE the result
   type.  Only constant ARG0 is folded.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value is a constant without overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8896
/* Fold a call to builtin modf.  ARG0 is the value, ARG1 the pointer
   out-parameter receiving the integral part, RETTYPE the result type.
   Only constant ARG0 is folded.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value is a constant without overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      /* All four real value classes are covered, so no default is
	 needed here.  */
      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
8952
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the backend has an insn for this classification, leave the
     call alone so expansion can use it directly.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) format: NaN/Inf live entirely in the
     high-order double, which the cases below exploit.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* Save ARG: it is used more than once below.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	/* Smallest normal value: 2^(emin-1) of the ORIGINAL mode.  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9111
9112/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
c2f47e15 9113 ARG is the argument for the call. */
726069ba 9114
9115static tree
12f08300 9116fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9117{
12f08300 9118 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9119
c2f47e15 9120 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9121 return NULL_TREE;
726069ba 9122
726069ba 9123 switch (builtin_index)
9124 {
12f08300 9125 case BUILT_IN_ISINF:
9126 if (!HONOR_INFINITIES (arg))
9127 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9128
9129 return NULL_TREE;
9130
c319d56a 9131 case BUILT_IN_ISINF_SIGN:
9132 {
9133 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9134 /* In a boolean context, GCC will fold the inner COND_EXPR to
9135 1. So e.g. "if (isinf_sign(x))" would be folded to just
9136 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
6cfc7001 9137 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
b9a16870 9138 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9139 tree tmp = NULL_TREE;
9140
9141 arg = builtin_save_expr (arg);
9142
9143 if (signbit_fn && isinf_fn)
9144 {
389dd41b 9145 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9146 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9147
389dd41b 9148 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9149 signbit_call, integer_zero_node);
389dd41b 9150 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9151 isinf_call, integer_zero_node);
48e1416a 9152
389dd41b 9153 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9154 integer_minus_one_node, integer_one_node);
389dd41b 9155 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9156 isinf_call, tmp,
c319d56a 9157 integer_zero_node);
9158 }
9159
9160 return tmp;
9161 }
9162
12f08300 9163 case BUILT_IN_ISFINITE:
9164 if (!HONOR_NANS (arg)
9165 && !HONOR_INFINITIES (arg))
9166 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9167
9168 return NULL_TREE;
9169
9170 case BUILT_IN_ISNAN:
9171 if (!HONOR_NANS (arg))
9172 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9173
9174 {
9175 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9176 if (is_ibm_extended)
9177 {
9178 /* NaN and Inf are encoded in the high-order double value
9179 only. The low-order value is not significant. */
9180 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9181 }
9182 }
9183 arg = builtin_save_expr (arg);
9184 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9185
726069ba 9186 default:
64db345d 9187 gcc_unreachable ();
726069ba 9188 }
9189}
9190
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All tests below are on the magnitude; save the ABS_EXPR so it is
     computed once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* The chain is built inside out, starting with the zero/subnormal
     decision and wrapping each broader test around it.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* Smallest normal value: 2^(emin-1) for this mode.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects RES when ARG is not NaN, FP_NAN
	 otherwise.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9262
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* NOTE(review): if neither argument is REAL_TYPE, cmp_type stays
     NULL_TREE here — presumably the type-generic front-end machinery
     guarantees at least one floating operand; verify against
     callers.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: without NaNs the answer is
	 statically 0 (keeping both operands for side effects).  */
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes express the OPPOSITE of the desired result, hence the
     TRUTH_NOT_EXPR wrapper.  */
  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
9311
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  /* True for the _overflow_p predicates, which only report overflow
     and do not store a result.  */
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: fold the arithmetic and the overflow
	 flag directly.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Otherwise emit the internal function, which yields a complex
	 value: REALPART is the result, IMAGPART the overflow flag.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Store the arithmetic result through ARG2 and yield the overflow
     flag as the value of the whole expression.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
9421
c388a0cf 9422/* Fold a call to __builtin_FILE to a constant string. */
9423
9424static inline tree
9425fold_builtin_FILE (location_t loc)
9426{
9427 if (const char *fname = LOCATION_FILE (loc))
859b51f8 9428 {
9429 /* The documentation says this builtin is equivalent to the preprocessor
9430 __FILE__ macro so it appears appropriate to use the same file prefix
9431 mappings. */
9432 fname = remap_macro_filename (fname);
c388a0cf 9433 return build_string_literal (strlen (fname) + 1, fname);
859b51f8 9434 }
c388a0cf 9435
9436 return build_string_literal (1, "");
9437}
9438
9439/* Fold a call to __builtin_FUNCTION to a constant string. */
9440
9441static inline tree
9442fold_builtin_FUNCTION ()
9443{
c2d38635 9444 const char *name = "";
9445
c388a0cf 9446 if (current_function_decl)
c2d38635 9447 name = lang_hooks.decl_printable_name (current_function_decl, 0);
c388a0cf 9448
c2d38635 9449 return build_string_literal (strlen (name) + 1, name);
c388a0cf 9450}
9451
/* Fold a call to __builtin_LINE to an integer constant.  LOC is the
   call's location; TYPE the builtin's integer result type.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
9459
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      /* INF variants warn when the target lacks infinities...  */
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      /* ...HUGE_VAL variants do not.  */
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* Zero-argument classify_type classifies "no argument".  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
7c2f0500 9498
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't attempt any simplification on erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding the whole call to a constant.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      /* Fold conj of a real-valued complex into a CONJ_EXPR tree.  */
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	/* Try the classification fold first, then fall back to the
	   interclass math function fold.  */
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op; fold the call away entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
728bac60 9625
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't attempt any simplification on erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding the whole call to a constant.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The type-generic comparison macros are handled by
       fold_builtin_unordered_cmp; each maps to an unordered tree code
       plus the ordinary code used when neither operand can be a NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9707
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't attempt any simplification on erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding the whole call to a constant.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      /* Three-argument __builtin_expect: third argument is the
	 probability expression; no separate probability constant.  */
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All overflow-checking arithmetic builtins share one folder.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
650e4c94 9781
c2f47e15 9782/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9783 arguments. IGNORE is true if the result of the
9784 function call is ignored. This function returns NULL_TREE if no
9785 simplification was possible. */
48e1416a 9786
2165588a 9787tree
e80cc485 9788fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9789{
9790 tree ret = NULL_TREE;
a7f5bb2d 9791
c2f47e15 9792 switch (nargs)
9793 {
9794 case 0:
e80cc485 9795 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9796 break;
9797 case 1:
e80cc485 9798 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9799 break;
9800 case 2:
e80cc485 9801 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9802 break;
9803 case 3:
e80cc485 9804 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9805 break;
c2f47e15 9806 default:
12f08300 9807 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9808 break;
9809 }
9810 if (ret)
9811 {
75a70cf9 9812 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9813 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9814 return ret;
9815 }
9816 return NULL_TREE;
9817}
9818
0e80b01d 9819/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9820 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9821 of arguments in ARGS to be omitted. OLDNARGS is the number of
9822 elements in ARGS. */
c2f47e15 9823
9824static tree
0e80b01d 9825rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9826 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9827{
0e80b01d 9828 int nargs = oldnargs - skip + n;
9829 tree *buffer;
c2f47e15 9830
0e80b01d 9831 if (n > 0)
c2f47e15 9832 {
0e80b01d 9833 int i, j;
c2f47e15 9834
0e80b01d 9835 buffer = XALLOCAVEC (tree, nargs);
9836 for (i = 0; i < n; i++)
9837 buffer[i] = va_arg (newargs, tree);
9838 for (j = skip; j < oldnargs; j++, i++)
9839 buffer[i] = args[j];
9840 }
9841 else
9842 buffer = args + skip;
19fbe3a4 9843
0e80b01d 9844 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9845}
c2f47e15 9846
198622c0 9847/* Return true if FNDECL shouldn't be folded right now.
9848 If a built-in function has an inline attribute always_inline
9849 wrapper, defer folding it after always_inline functions have
9850 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9851 might not be performed. */
9852
51d2c51e 9853bool
198622c0 9854avoid_folding_inline_builtin (tree fndecl)
9855{
9856 return (DECL_DECLARED_INLINE_P (fndecl)
9857 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9858 && cfun
9859 && !cfun->always_inline_functions_inlined
9860 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9861}
9862
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* Don't fold always_inline wrappers before they are inlined.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded via the target hook;
	 everything else goes through the generic arity dispatcher.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
48e1416a 9906
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through the address of a FUNCTION_DECL can be
     folded here.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      /* Don't fold always_inline wrappers before they are inlined.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* Machine-dependent builtins go through the target hook; all
	 others through the generic arity dispatcher.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9942
af1409ad 9943/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9944 along with N new arguments specified as the "..." parameters. SKIP
9945 is the number of arguments in EXP to be omitted. This function is used
9946 to do varargs-to-varargs transformations. */
9947
9948static tree
9949rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9950{
9951 va_list ap;
9952 tree t;
9953
9954 va_start (ap, n);
9955 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9956 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9957 va_end (ap);
c2f47e15 9958
af1409ad 9959 return t;
c2f47e15 9960}
9961
9962/* Validate a single argument ARG against a tree code CODE representing
184fac50 9963 a type. Return true when argument is valid. */
48e1416a 9964
c2f47e15 9965static bool
184fac50 9966validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9967{
9968 if (!arg)
9969 return false;
9970 else if (code == POINTER_TYPE)
184fac50 9971 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9972 else if (code == INTEGER_TYPE)
9973 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9974 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9975}
0eb671f7 9976
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* enum arguments are promoted to int in varargs, hence the cast.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
10031
fc2a2dcb 10032/* Default target-specific builtin expander that does nothing. */
10033
10034rtx
aecda0d6 10035default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10036 rtx target ATTRIBUTE_UNUSED,
10037 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 10038 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 10039 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 10040{
10041 return NULL_RTX;
10042}
c7926a82 10043
01537105 10044/* Returns true is EXP represents data that would potentially reside
10045 in a readonly section. */
10046
b9ea678c 10047bool
01537105 10048readonly_data_expr (tree exp)
10049{
10050 STRIP_NOPS (exp);
10051
9ff0637e 10052 if (TREE_CODE (exp) != ADDR_EXPR)
10053 return false;
10054
10055 exp = get_base_address (TREE_OPERAND (exp, 0));
10056 if (!exp)
10057 return false;
10058
10059 /* Make sure we call decl_readonly_section only for trees it
10060 can handle (since it returns true for everything it doesn't
10061 understand). */
491e04ef 10062 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10063 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 10064 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 10065 return decl_readonly_section (exp, 0);
01537105 10066 else
10067 return false;
10068}
4ee9c684 10069
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* Every transformation below needs S2 as a constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings are constant: compute the result at compile
	     time using the host strpbrk.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
10135
c2f47e15 10136/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10137 to the call.
4ee9c684 10138
c2f47e15 10139 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10140 simplified form of the call as a tree.
10141
10142 The simplified form may be a constant or other expression which
10143 computes the same value, but in a more efficient manner (including
10144 calls to other builtin functions).
10145
10146 The call may contain arguments which need to be evaluated, but
10147 which are not useful to determine the result of the call. In
10148 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10149 COMPOUND_EXPR will be an argument which must be evaluated.
10150 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10151 COMPOUND_EXPR in the chain will contain the tree for the simplified
10152 form of the builtin function call. */
10153
10154static tree
389dd41b 10155fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 10156{
c2f47e15 10157 if (!validate_arg (s1, POINTER_TYPE)
10158 || !validate_arg (s2, POINTER_TYPE))
10159 return NULL_TREE;
4ee9c684 10160 else
10161 {
4ee9c684 10162 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10163
c2f47e15 10164 /* If either argument is "", return NULL_TREE. */
4ee9c684 10165 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 10166 /* Evaluate and ignore both arguments in case either one has
10167 side-effects. */
389dd41b 10168 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 10169 s1, s2);
c2f47e15 10170 return NULL_TREE;
4ee9c684 10171 }
10172}
10173
c2f47e15 10174/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10175 to the call.
4ee9c684 10176
c2f47e15 10177 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10178 simplified form of the call as a tree.
10179
10180 The simplified form may be a constant or other expression which
10181 computes the same value, but in a more efficient manner (including
10182 calls to other builtin functions).
10183
10184 The call may contain arguments which need to be evaluated, but
10185 which are not useful to determine the result of the call. In
10186 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10187 COMPOUND_EXPR will be an argument which must be evaluated.
10188 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10189 COMPOUND_EXPR in the chain will contain the tree for the simplified
10190 form of the builtin function call. */
10191
10192static tree
389dd41b 10193fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 10194{
c2f47e15 10195 if (!validate_arg (s1, POINTER_TYPE)
10196 || !validate_arg (s2, POINTER_TYPE))
10197 return NULL_TREE;
4ee9c684 10198 else
10199 {
c2f47e15 10200 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 10201 const char *p1 = c_getstr (s1);
4ee9c684 10202 if (p1 && *p1 == '\0')
10203 {
10204 /* Evaluate and ignore argument s2 in case it has
10205 side-effects. */
389dd41b 10206 return omit_one_operand_loc (loc, size_type_node,
39761420 10207 size_zero_node, s2);
4ee9c684 10208 }
10209
10210 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 10211 const char *p2 = c_getstr (s2);
4ee9c684 10212 if (p2 && *p2 == '\0')
10213 {
b9a16870 10214 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 10215
10216 /* If the replacement _DECL isn't initialized, don't do the
10217 transformation. */
10218 if (!fn)
c2f47e15 10219 return NULL_TREE;
4ee9c684 10220
389dd41b 10221 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 10222 }
c2f47e15 10223 return NULL_TREE;
4ee9c684 10224 }
10225}
10226
c2f47e15 10227/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 10228 produced. False otherwise. This is done so that we don't output the error
10229 or warning twice or three times. */
75a70cf9 10230
743b0c6a 10231bool
c2f47e15 10232fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 10233{
10234 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 10235 int nargs = call_expr_nargs (exp);
10236 tree arg;
d98fd4a4 10237 /* There is good chance the current input_location points inside the
10238 definition of the va_start macro (perhaps on the token for
10239 builtin) in a system header, so warnings will not be emitted.
10240 Use the location in real source code. */
be1e7283 10241 location_t current_location =
d98fd4a4 10242 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10243 NULL);
4ee9c684 10244
257d99c3 10245 if (!stdarg_p (fntype))
743b0c6a 10246 {
85b9be9b 10247 error ("%<va_start%> used in function with fixed arguments");
743b0c6a 10248 return true;
10249 }
c2f47e15 10250
10251 if (va_start_p)
79012a9d 10252 {
c2f47e15 10253 if (va_start_p && (nargs != 2))
10254 {
10255 error ("wrong number of arguments to function %<va_start%>");
10256 return true;
10257 }
10258 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 10259 }
10260 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10261 when we checked the arguments and if needed issued a warning. */
c2f47e15 10262 else
4ee9c684 10263 {
c2f47e15 10264 if (nargs == 0)
10265 {
10266 /* Evidently an out of date version of <stdarg.h>; can't validate
10267 va_start's second argument, but can still work as intended. */
d98fd4a4 10268 warning_at (current_location,
7edb1062 10269 OPT_Wvarargs,
10270 "%<__builtin_next_arg%> called without an argument");
c2f47e15 10271 return true;
10272 }
10273 else if (nargs > 1)
a0c938f0 10274 {
c2f47e15 10275 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 10276 return true;
10277 }
c2f47e15 10278 arg = CALL_EXPR_ARG (exp, 0);
10279 }
10280
a8dd994c 10281 if (TREE_CODE (arg) == SSA_NAME)
10282 arg = SSA_NAME_VAR (arg);
10283
c2f47e15 10284 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 10285 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 10286 the arguments and if needed issuing a warning. */
10287 if (!integer_zerop (arg))
10288 {
10289 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 10290
4ee9c684 10291 /* Strip off all nops for the sake of the comparison. This
10292 is not quite the same as STRIP_NOPS. It does more.
10293 We must also strip off INDIRECT_EXPR for C++ reference
10294 parameters. */
72dd6141 10295 while (CONVERT_EXPR_P (arg)
4ee9c684 10296 || TREE_CODE (arg) == INDIRECT_REF)
10297 arg = TREE_OPERAND (arg, 0);
10298 if (arg != last_parm)
a0c938f0 10299 {
b08cf617 10300 /* FIXME: Sometimes with the tree optimizers we can get the
10301 not the last argument even though the user used the last
10302 argument. We just warn and set the arg to be the last
10303 argument so that we will get wrong-code because of
10304 it. */
d98fd4a4 10305 warning_at (current_location,
7edb1062 10306 OPT_Wvarargs,
d98fd4a4 10307 "second parameter of %<va_start%> not last named argument");
743b0c6a 10308 }
24158ad7 10309
10310 /* Undefined by C99 7.15.1.4p4 (va_start):
10311 "If the parameter parmN is declared with the register storage
10312 class, with a function or array type, or with a type that is
10313 not compatible with the type that results after application of
10314 the default argument promotions, the behavior is undefined."
10315 */
10316 else if (DECL_REGISTER (arg))
d98fd4a4 10317 {
10318 warning_at (current_location,
7edb1062 10319 OPT_Wvarargs,
67cf9b55 10320 "undefined behavior when second parameter of "
d98fd4a4 10321 "%<va_start%> is declared with %<register%> storage");
10322 }
24158ad7 10323
79012a9d 10324 /* We want to verify the second parameter just once before the tree
a0c938f0 10325 optimizers are run and then avoid keeping it in the tree,
10326 as otherwise we could warn even for correct code like:
10327 void foo (int i, ...)
10328 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 10329 if (va_start_p)
10330 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10331 else
10332 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 10333 }
10334 return false;
4ee9c684 10335}
10336
10337
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  /* Diagnose a malformed argument list and expand to a trap.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument must be an integer constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  /* NOTE(review): presumably calls with a determinable size were folded
     earlier; here we return the "unknown size" defaults: -1 for types
     0 and 1, 0 for types 2 and 3.  */
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
10372
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* The second argument is an integer for memset, a pointer otherwise.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses up front; SIZES_OK records whether
     the access was validated (or could not be proven invalid).  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  /* A non-constant object size means the check must stay at run time.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  /* (size_t) -1 as the object size means "unknown/unlimited", in which
     case the checking variant degenerates to the plain function.  */
  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Build a call to the non-checking variant with the same DEST,
	 SRC and LEN arguments (the object size is dropped).  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      /* Preserve tail-call eligibility from the original call.  */
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10489
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call expression and FCODE identifies which checking
   builtin (__strcpy_chk, __strncat_chk, __snprintf_chk, ...) it is;
   FCODE determines which argument positions hold the source string,
   bound and object size.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  /* Pick apart the arguments according to each builtin's signature.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
10562
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call and FCODE
   the builtin's code; the output length is computed only for format
   strings without conversions or for a lone "%s" with a literal
   argument.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A non-constant or "unlimited" ((size_t)-1) object size cannot be
     checked at compile time.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
10620
2c281b15 10621/* Emit warning if a free is called with address of a variable. */
10622
10623static void
10624maybe_emit_free_warning (tree exp)
10625{
06229fe5 10626 if (call_expr_nargs (exp) != 1)
10627 return;
10628
2c281b15 10629 tree arg = CALL_EXPR_ARG (exp, 0);
10630
10631 STRIP_NOPS (arg);
10632 if (TREE_CODE (arg) != ADDR_EXPR)
10633 return;
10634
10635 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 10636 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 10637 return;
10638
10639 if (SSA_VAR_P (arg))
f74ea1c2 10640 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10641 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 10642 else
f74ea1c2 10643 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10644 "%Kattempt to free a non-heap object", exp);
2c281b15 10645}
10646
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST is the object-size type, a constant in [0, 3].
   Returns the folded size as a size_t constant, or NULL_TREE when
   the size cannot be determined yet.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a literal 0, 1, 2 or 3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      /* Return value deliberately ignored: on failure BYTES is set to
	 the "unknown" value appropriate for OBJECT_SIZE_TYPE.  */
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
10693
12f08300 10694/* Builtins with folding operations that operate on "..." arguments
10695 need special handling; we need to store the arguments in a convenient
10696 data structure before attempting any folding. Fortunately there are
10697 only a few builtins that fall into this category. FNDECL is the
10698 function, EXP is the CALL_EXPR for the call. */
10699
10700static tree
10701fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10702{
10703 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10704 tree ret = NULL_TREE;
10705
10706 switch (fcode)
10707 {
10708 case BUILT_IN_FPCLASSIFY:
10709 ret = fold_builtin_fpclassify (loc, args, nargs);
10710 break;
10711
10712 default:
10713 break;
10714 }
10715 if (ret)
10716 {
10717 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10718 SET_EXPR_LOCATION (ret, loc);
10719 TREE_NO_WARNING (ret) = 1;
10720 return ret;
10721 }
10722 return NULL_TREE;
10723}
10724
99eabcc1 10725/* Initialize format string characters in the target charset. */
10726
b9ea678c 10727bool
99eabcc1 10728init_target_chars (void)
10729{
10730 static bool init;
10731 if (!init)
10732 {
10733 target_newline = lang_hooks.to_target_charset ('\n');
10734 target_percent = lang_hooks.to_target_charset ('%');
10735 target_c = lang_hooks.to_target_charset ('c');
10736 target_s = lang_hooks.to_target_charset ('s');
10737 if (target_newline == 0 || target_percent == 0 || target_c == 0
10738 || target_s == 0)
10739 return false;
10740
10741 target_percent_c[0] = target_percent;
10742 target_percent_c[1] = target_c;
10743 target_percent_c[2] = '\0';
10744
10745 target_percent_s[0] = target_percent;
10746 target_percent_s[1] = target_s;
10747 target_percent_s[2] = '\0';
10748
10749 target_percent_s_newline[0] = target_percent;
10750 target_percent_s_newline[1] = target_s;
10751 target_percent_s_newline[2] = target_newline;
10752 target_percent_s_newline[3] = '\0';
a0c938f0 10753
99eabcc1 10754 init = true;
10755 }
10756 return true;
10757}
bffb7645 10758
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Round to nearest when converting into GCC's representation.  */
      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10795
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert both parts separately; TYPE is the complex type, so
	 TREE_TYPE (type) is its component (real) type.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
239d491a 10842
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when folding is not
   possible (non-constant arguments, non-binary target format, ...).  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  /* Work at exactly the target format's precision and rounding.  */
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear flags first so do_mpfr_ckconv can inspect them.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
e84da7c1 10916
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  /* Work at exactly the target format's precision and rounding.  */
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear flags first so do_mpfr_ckconv can inspect them.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
75a70cf9 10981
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  /* Match the component type's precision and rounding modes.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear flags first so do_mpc_ckconv can inspect them.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
239d491a 11040
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.
   STMT is the builtin call statement; IGNORE is true when the call's
   return value is unused.  Returns the folded tree, or NULL_TREE if
   the call should be left alone.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls using __builtin_va_arg_pack () must survive until expansion,
     so never fold those.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper some folders add.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
7bfefa9d 11090
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  /* For ffs on targets whose int is narrower than a word, also rename
     the "ffs" libfunc and optab entry so the out-of-line fallback uses
     the user's name.  */
  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
a6b74a67 11112
11113/* Return true if DECL is a builtin that expands to a constant or similarly
11114 simple code. */
11115bool
11116is_simple_builtin (tree decl)
11117{
a0e9bfbb 11118 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
a6b74a67 11119 switch (DECL_FUNCTION_CODE (decl))
11120 {
11121 /* Builtins that expand to constants. */
11122 case BUILT_IN_CONSTANT_P:
11123 case BUILT_IN_EXPECT:
11124 case BUILT_IN_OBJECT_SIZE:
11125 case BUILT_IN_UNREACHABLE:
11126 /* Simple register moves or loads from stack. */
fca0886c 11127 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 11128 case BUILT_IN_RETURN_ADDRESS:
11129 case BUILT_IN_EXTRACT_RETURN_ADDR:
11130 case BUILT_IN_FROB_RETURN_ADDR:
11131 case BUILT_IN_RETURN:
11132 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11133 case BUILT_IN_FRAME_ADDRESS:
11134 case BUILT_IN_VA_END:
11135 case BUILT_IN_STACK_SAVE:
11136 case BUILT_IN_STACK_RESTORE:
11137 /* Exception state returns or moves registers around. */
11138 case BUILT_IN_EH_FILTER:
11139 case BUILT_IN_EH_POINTER:
11140 case BUILT_IN_EH_COPY_VALUES:
11141 return true;
11142
11143 default:
11144 return false;
11145 }
11146
11147 return false;
11148}
11149
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  /* All machine-dependent builtins are considered cheap.  */
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* Bit-twiddling, classification and abs-style builtins that
	 typically expand to a handful of instructions.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	/* Anything in the simple set is inexpensive as well.  */
	return is_simple_builtin (decl);
      }

  return false;
}
507a998e 11234
11235/* Return true if T is a constant and the value cast to a target char
11236 can be represented by a host char.
11237 Store the casted char constant in *P if so. */
11238
11239bool
11240target_char_cst_p (tree t, char *p)
11241{
11242 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11243 return false;
11244
11245 *p = (char)tree_to_uhwi (t);
11246 return true;
11247}
6cd252e8 11248
11249/* Return true if the builtin DECL is implemented in a standard library.
11250 Otherwise returns false which doesn't guarantee it is not (thus the list of
11251 handled builtins below may be incomplete). */
11252
11253bool
11254builtin_with_linkage_p (tree decl)
11255{
11256 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11257 switch (DECL_FUNCTION_CODE (decl))
11258 {
11259 CASE_FLT_FN (BUILT_IN_ACOS):
11260 CASE_FLT_FN (BUILT_IN_ACOSH):
11261 CASE_FLT_FN (BUILT_IN_ASIN):
11262 CASE_FLT_FN (BUILT_IN_ASINH):
11263 CASE_FLT_FN (BUILT_IN_ATAN):
11264 CASE_FLT_FN (BUILT_IN_ATANH):
11265 CASE_FLT_FN (BUILT_IN_ATAN2):
11266 CASE_FLT_FN (BUILT_IN_CBRT):
11267 CASE_FLT_FN (BUILT_IN_CEIL):
11268 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11269 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11270 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11271 CASE_FLT_FN (BUILT_IN_COS):
11272 CASE_FLT_FN (BUILT_IN_COSH):
11273 CASE_FLT_FN (BUILT_IN_ERF):
11274 CASE_FLT_FN (BUILT_IN_ERFC):
11275 CASE_FLT_FN (BUILT_IN_EXP):
11276 CASE_FLT_FN (BUILT_IN_EXP2):
11277 CASE_FLT_FN (BUILT_IN_EXPM1):
11278 CASE_FLT_FN (BUILT_IN_FABS):
11279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11280 CASE_FLT_FN (BUILT_IN_FDIM):
11281 CASE_FLT_FN (BUILT_IN_FLOOR):
11282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11283 CASE_FLT_FN (BUILT_IN_FMA):
11284 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11285 CASE_FLT_FN (BUILT_IN_FMAX):
11286 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11287 CASE_FLT_FN (BUILT_IN_FMIN):
11288 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11289 CASE_FLT_FN (BUILT_IN_FMOD):
11290 CASE_FLT_FN (BUILT_IN_FREXP):
11291 CASE_FLT_FN (BUILT_IN_HYPOT):
11292 CASE_FLT_FN (BUILT_IN_ILOGB):
11293 CASE_FLT_FN (BUILT_IN_LDEXP):
11294 CASE_FLT_FN (BUILT_IN_LGAMMA):
11295 CASE_FLT_FN (BUILT_IN_LLRINT):
11296 CASE_FLT_FN (BUILT_IN_LLROUND):
11297 CASE_FLT_FN (BUILT_IN_LOG):
11298 CASE_FLT_FN (BUILT_IN_LOG10):
11299 CASE_FLT_FN (BUILT_IN_LOG1P):
11300 CASE_FLT_FN (BUILT_IN_LOG2):
11301 CASE_FLT_FN (BUILT_IN_LOGB):
11302 CASE_FLT_FN (BUILT_IN_LRINT):
11303 CASE_FLT_FN (BUILT_IN_LROUND):
11304 CASE_FLT_FN (BUILT_IN_MODF):
11305 CASE_FLT_FN (BUILT_IN_NAN):
11306 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11307 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11308 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11309 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11310 CASE_FLT_FN (BUILT_IN_POW):
11311 CASE_FLT_FN (BUILT_IN_REMAINDER):
11312 CASE_FLT_FN (BUILT_IN_REMQUO):
11313 CASE_FLT_FN (BUILT_IN_RINT):
11314 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11315 CASE_FLT_FN (BUILT_IN_ROUND):
11316 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11317 CASE_FLT_FN (BUILT_IN_SCALBLN):
11318 CASE_FLT_FN (BUILT_IN_SCALBN):
11319 CASE_FLT_FN (BUILT_IN_SIN):
11320 CASE_FLT_FN (BUILT_IN_SINH):
11321 CASE_FLT_FN (BUILT_IN_SINCOS):
11322 CASE_FLT_FN (BUILT_IN_SQRT):
11323 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11324 CASE_FLT_FN (BUILT_IN_TAN):
11325 CASE_FLT_FN (BUILT_IN_TANH):
11326 CASE_FLT_FN (BUILT_IN_TGAMMA):
11327 CASE_FLT_FN (BUILT_IN_TRUNC):
11328 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11329 return true;
11330 default:
11331 break;
11332 }
11333 return false;
11334}