]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
2018-10-05 François Dumont <fdumont@gcc.gnu.org>
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
53800dbe 1/* Expand builtin functions.
8e8f6434 2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
7c2ecb89 20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
a950155e 34#include "params.h"
7c29e30e 35#include "tm_p.h"
36#include "stringpool.h"
c296f633 37#include "tree-vrp.h"
7c29e30e 38#include "tree-ssanames.h"
39#include "expmed.h"
40#include "optabs.h"
7c29e30e 41#include "emit-rtl.h"
42#include "recog.h"
7c29e30e 43#include "diagnostic-core.h"
b20a8bb4 44#include "alias.h"
b20a8bb4 45#include "fold-const.h"
6c21be92 46#include "fold-const-call.h"
e6a18b5a 47#include "gimple-ssa-warn-restrict.h"
9ed99284 48#include "stor-layout.h"
49#include "calls.h"
50#include "varasm.h"
51#include "tree-object-size.h"
dae0b5cb 52#include "realmpfr.h"
94ea8568 53#include "cfgrtl.h"
53800dbe 54#include "except.h"
d53441c8 55#include "dojump.h"
56#include "explow.h"
d53441c8 57#include "stmt.h"
53800dbe 58#include "expr.h"
d8fc4d0b 59#include "libfuncs.h"
53800dbe 60#include "output.h"
61#include "typeclass.h"
63c62881 62#include "langhooks.h"
162719b3 63#include "value-prof.h"
3b9c3a16 64#include "builtins.h"
30a86690 65#include "stringpool.h"
66#include "attribs.h"
f9acf11a 67#include "asan.h"
1f24b8e9 68#include "internal-fn.h"
e3240774 69#include "case-cfn-macros.h"
732905bb 70#include "gimple-fold.h"
5aef8938 71#include "intl.h"
859b51f8 72#include "file-prefix-map.h" /* remap_macro_filename() */
a7babc1e 73#include "gomp-constants.h"
74#include "omp-general.h"
5383fb56 75
3b9c3a16 76struct target_builtins default_target_builtins;
77#if SWITCHABLE_TARGET
78struct target_builtins *this_target_builtins = &default_target_builtins;
79#endif
80
ab7943b9 81/* Define the names of the builtin function types and codes. */
96423453 82const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
9cfddb70 85#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 86const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 87{
88#include "builtins.def"
89};
ab7943b9 90
cffdfb3d 91/* Setup an array of builtin_info_type, make sure each element decl is
df94cd3b 92 initialized to NULL_TREE. */
cffdfb3d 93builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 94
0b049e15 95/* Non-zero if __builtin_constant_p should be folded right away. */
96bool force_folding_builtin_constant_p;
97
f77c4496 98static rtx c_readstr (const char *, scalar_int_mode);
aecda0d6 99static int target_char_cast (tree, char *);
d8ae1baa 100static rtx get_memory_rtx (tree, tree);
aecda0d6 101static int apply_args_size (void);
102static int apply_result_size (void);
aecda0d6 103static rtx result_vector (int, rtx);
aecda0d6 104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6b43bae4 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 114static rtx expand_builtin_sincos (tree);
f97eea22 115static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 118static rtx expand_builtin_next_arg (void);
aecda0d6 119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
0dbefa15 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
a65c4d64 123static rtx expand_builtin_strcmp (tree, rtx);
3754d046 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
f77c4496 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
8d6c6ef5 126static rtx expand_builtin_memchr (tree, rtx);
a65c4d64 127static rtx expand_builtin_memcpy (tree, rtx);
d0fbba1a 128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp, int endp);
4d317237 130static rtx expand_builtin_memmove (tree, rtx);
d0fbba1a 131static rtx expand_builtin_mempcpy (tree, rtx);
d0fbba1a 132static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
5aef8938 133static rtx expand_builtin_strcat (tree, rtx);
a65c4d64 134static rtx expand_builtin_strcpy (tree, rtx);
a788aa5f 135static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
3754d046 136static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
4d317237 137static rtx expand_builtin_stpncpy (tree, rtx);
5aef8938 138static rtx expand_builtin_strncat (tree, rtx);
a65c4d64 139static rtx expand_builtin_strncpy (tree, rtx);
f77c4496 140static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
3754d046 141static rtx expand_builtin_memset (tree, rtx, machine_mode);
142static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 143static rtx expand_builtin_bzero (tree);
3754d046 144static rtx expand_builtin_strlen (tree, rtx, machine_mode);
864bd5de 145static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
2b29cc6a 146static rtx expand_builtin_alloca (tree);
3754d046 147static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 148static rtx expand_builtin_frame_address (tree, tree);
389dd41b 149static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 150static rtx expand_builtin_expect (tree, rtx);
01107f42 151static rtx expand_builtin_expect_with_probability (tree, rtx);
aecda0d6 152static tree fold_builtin_constant_p (tree);
153static tree fold_builtin_classify_type (tree);
c7cbde74 154static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 155static tree fold_builtin_inf (location_t, tree, int);
389dd41b 156static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
184fac50 157static bool validate_arg (const_tree, enum tree_code code);
aecda0d6 158static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 159static rtx expand_builtin_signbit (tree, rtx);
389dd41b 160static tree fold_builtin_memcmp (location_t, tree, tree, tree);
389dd41b 161static tree fold_builtin_isascii (location_t, tree);
162static tree fold_builtin_toascii (location_t, tree);
163static tree fold_builtin_isdigit (location_t, tree);
164static tree fold_builtin_fabs (location_t, tree, tree);
165static tree fold_builtin_abs (location_t, tree, tree);
166static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 167 enum tree_code);
e80cc485 168static tree fold_builtin_0 (location_t, tree);
169static tree fold_builtin_1 (location_t, tree, tree);
170static tree fold_builtin_2 (location_t, tree, tree, tree);
171static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
12f08300 172static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 173
174static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
389dd41b 175static tree fold_builtin_strspn (location_t, tree, tree);
176static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 177
0a39fd54 178static rtx expand_builtin_object_size (tree);
3754d046 179static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 180 enum built_in_function);
181static void maybe_emit_chk_warning (tree, enum built_in_function);
182static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 183static void maybe_emit_free_warning (tree);
c2f47e15 184static tree fold_builtin_object_size (tree, tree);
99eabcc1 185
e788f202 186unsigned HOST_WIDE_INT target_newline;
b9ea678c 187unsigned HOST_WIDE_INT target_percent;
99eabcc1 188static unsigned HOST_WIDE_INT target_c;
189static unsigned HOST_WIDE_INT target_s;
aea88c77 190char target_percent_c[3];
b9ea678c 191char target_percent_s[3];
e788f202 192char target_percent_s_newline[4];
e5407ca6 193static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 194static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 195static void expand_builtin_sync_synchronize (void);
0a39fd54 196
/* Return true if NAME starts with one of the reserved builtin prefixes
   __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  /* Use the literal's own length for each comparison so the count can
     never drift out of sync with the prefix string; strlen on a string
     literal is folded at compile time.  */
  if (strncmp (name, "__builtin_", strlen ("__builtin_")) == 0)
    return true;
  if (strncmp (name, "__sync_", strlen ("__sync_")) == 0)
    return true;
  if (strncmp (name, "__atomic_", strlen ("__atomic_")) == 0)
    return true;
  return false;
}
4ee9c684 210
1c47b3e8 211/* Return true if NODE should be considered for inline expansion regardless
212 of the optimization level. This means whenever a function is invoked with
213 its "internal" name, which normally contains the prefix "__builtin". */
214
ae62deea 215bool
1c47b3e8 216called_as_built_in (tree node)
217{
218 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
219 we want the name used to call the function, not the name it
220 will have. */
221 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
222 return is_builtin_name (name);
223}
224
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  /* Start from the weakest possible claim; refined below.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Nothing useful is known about label addresses.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      /* All-ones mask: by default no explicit-alignment bits are masked
	 off ptr_bitpos below.  */
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned (addr & -mask), handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  /* An additional unknown index makes any alignment claim void.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  The global-scope qualifier
     disambiguates the helper from the local variable of the same name.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  /* Only the constant coefficient of the poly offset contributes to the
     reported misalignment; runtime parts were folded into ALIGN above.  */
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
382
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false here: EXP is an actual access, so the worker may also
     use the access type's alignment to sharpen the result.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
394
957d0361 395/* Return the alignment in bits of EXP, an object. */
0c883ef3 396
397unsigned int
957d0361 398get_object_alignment (tree exp)
0c883ef3 399{
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
ceea063b 403 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 404
98ab9e8f 405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
ac29ece2 409 align = least_bit_hwi (bitpos);
957d0361 410 return align;
698537d1 411}
412
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: delegate to the object-alignment worker with ADDR_P true,
       since taking the address does not imply an access takes place.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* base + offset: recurse on the base, then fold the offset into
	 the misalignment (constant offset) or the alignment bound
	 (variable offset, via its known trailing zero bits).  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Pointer SSA names carry alignment info gathered by earlier
	 analysis passes, if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address has fully known low bits.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: fall back to the weakest possible claim.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
487
69fbc3aa 488/* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495unsigned int
496get_pointer_alignment (tree exp)
497{
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
ceea063b 500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
ac29ece2 507 align = least_bit_hwi (bitpos);
69fbc3aa 508
509 return align;
510}
511
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char: let the C library scan
	 for the terminating nul instead of a hand-rolled byte loop.  */
      const char *nul = (const char *) memchr (ptr, 0, maxelts);
      n = nul ? (unsigned) (nul - (const char *) ptr) : maxelts;
    }
  else
    {
      /* Wide characters: an element is "zero" when all of its ELTSIZE
	 bytes are zero.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char *) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
544
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument
   declared at DECL that is a character array with no terminating NUL.
   NOTE(review): the comment originally said NONSTR but the parameter is
   named DECL -- presumably the same object; confirm against callers.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  /* Honor a previously set no-warning bit to avoid duplicates.  */
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  /* Only note the declaration and latch the no-warning bit when the
     warning was actually emitted (it may be suppressed by options).  */
  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
565
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (exp, 1, &data);
  if (len == NULL_TREE && data.len && data.decl)
    {
      if (size)
	{
	  len = data.len;
	  if (data.off)
	    {
	      /* Constant offsets are already accounted for in data.len, but
		 not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (data.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (data.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (data.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the constant part of the offset from the size
		     of the array; the variable part makes it inexact.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (data.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		/* Wholly variable offset: size is only an upper bound.  */
		*exact = false;
	    }
	  else
	    /* No offset at all: data.len is the exact size.  */
	    *exact = true;

	  *size = len;
	}
      return data.decl;
    }

  return NULL_TREE;
}
613
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data;
  memset (&local_strlen_data, 0, sizeof (c_strlen_data));
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      /* cond ? a : b has a known length only when both arms agree.  */
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    /* (e1, e2): the length is that of the second operand.  */
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  Give up when the declared
     element size does not match the requested ELTSIZE.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->len = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (len), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    /* Offset does not fit or is not a whole number of elements.  */
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->len = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
801
e913b5cd 802/* Return a constant integer corresponding to target reading
8c85fcb7 803 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 804
6840589f 805static rtx
f77c4496 806c_readstr (const char *str, scalar_int_mode mode)
6840589f 807{
6840589f 808 HOST_WIDE_INT ch;
809 unsigned int i, j;
e913b5cd 810 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 811
812 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 813 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
814 / HOST_BITS_PER_WIDE_INT;
815
a12aa4cc 816 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 817 for (i = 0; i < len; i++)
818 tmp[i] = 0;
6840589f 819
6840589f 820 ch = 1;
821 for (i = 0; i < GET_MODE_SIZE (mode); i++)
822 {
823 j = i;
824 if (WORDS_BIG_ENDIAN)
825 j = GET_MODE_SIZE (mode) - i - 1;
826 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 827 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 828 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
829 j *= BITS_PER_UNIT;
7d3f6cc7 830
6840589f 831 if (ch)
832 ch = (unsigned char) str[i];
e913b5cd 833 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 834 }
ddb1be65 835
ab2c1de8 836 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 837 return immed_wide_int_const (c, mode);
6840589f 838}
839
ecc318ff 840/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 841 host char type, return zero and put that value into variable pointed to by
ecc318ff 842 P. */
843
844static int
aecda0d6 845target_char_cast (tree cst, char *p)
ecc318ff 846{
847 unsigned HOST_WIDE_INT val, hostval;
848
c19686c5 849 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 850 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
851 return 1;
852
e913b5cd 853 /* Do not care if it fits or not right here. */
f9ae6f95 854 val = TREE_INT_CST_LOW (cst);
e913b5cd 855
ecc318ff 856 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 857 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 858
859 hostval = val;
860 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 861 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 862
863 if (val != hostval)
864 return 1;
865
866 *p = hostval;
867 return 0;
868}
869
4ee9c684 870/* Similar to save_expr, but assumes that arbitrary code is not executed
871 in between the multiple evaluations. In particular, we assume that a
872 non-addressable local variable will not be modified. */
873
874static tree
875builtin_save_expr (tree exp)
876{
f6c35aa4 877 if (TREE_CODE (exp) == SSA_NAME
878 || (TREE_ADDRESSABLE (exp) == 0
879 && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 880 || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 881 return exp;
882
883 return save_expr (exp);
884}
885
53800dbe 886/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
887 times to get the address of either a higher stack frame, or a return
888 address located within it (depending on FNDECL_CODE). */
902de8ed 889
c626df3d 890static rtx
869d0ef0 891expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 892{
893 int i;
869d0ef0 894 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
3f840859 895 if (tem == NULL_RTX)
e3e15c50 896 {
3f840859 897 /* For a zero count with __builtin_return_address, we don't care what
898 frame address we return, because target-specific definitions will
899 override us. Therefore frame pointer elimination is OK, and using
900 the soft frame pointer is OK.
901
902 For a nonzero count, or a zero count with __builtin_frame_address,
903 we require a stable offset from the current frame pointer to the
904 previous one, so we must use the hard frame pointer, and
905 we must disable frame pointer elimination. */
906 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
907 tem = frame_pointer_rtx;
908 else
909 {
910 tem = hard_frame_pointer_rtx;
e3e15c50 911
3f840859 912 /* Tell reload not to eliminate the frame pointer. */
913 crtl->accesses_prior_frames = 1;
914 }
e3e15c50 915 }
869d0ef0 916
53800dbe 917 if (count > 0)
918 SETUP_FRAME_ADDRESSES ();
53800dbe 919
3a69c60c 920 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 921 register. There is no way to access it off of the current frame
922 pointer, but it can be accessed off the previous frame pointer by
923 reading the value from the register window save area. */
a26d6c60 924 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 925 count--;
53800dbe 926
927 /* Scan back COUNT frames to the specified frame. */
928 for (i = 0; i < count; i++)
929 {
930 /* Assume the dynamic chain pointer is in the word that the
931 frame address points to, unless otherwise specified. */
53800dbe 932 tem = DYNAMIC_CHAIN_ADDRESS (tem);
53800dbe 933 tem = memory_address (Pmode, tem);
00060fc2 934 tem = gen_frame_mem (Pmode, tem);
83fc1478 935 tem = copy_to_reg (tem);
53800dbe 936 }
937
3a69c60c 938 /* For __builtin_frame_address, return what we've got. But, on
939 the SPARC for example, we may have to add a bias. */
53800dbe 940 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 941 return FRAME_ADDR_RTX (tem);
53800dbe 942
3a69c60c 943 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 944#ifdef RETURN_ADDR_RTX
945 tem = RETURN_ADDR_RTX (count, tem);
946#else
947 tem = memory_address (Pmode,
29c05e22 948 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 949 tem = gen_frame_mem (Pmode, tem);
53800dbe 950#endif
951 return tem;
952}
953
f7c44134 954/* Alias set used for setjmp buffer. */
32c2fdea 955static alias_set_type setjmp_alias_set = -1;
f7c44134 956
6b7f6858 957/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 958 return to RECEIVER_LABEL. This is also called directly by the SJLJ
959 exception handling code. */
53800dbe 960
6b7f6858 961void
aecda0d6 962expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 963{
3754d046 964 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 965 rtx stack_save;
f7c44134 966 rtx mem;
53800dbe 967
f7c44134 968 if (setjmp_alias_set == -1)
969 setjmp_alias_set = new_alias_set ();
970
85d654dd 971 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 972
37ae8504 973 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 974
6b7f6858 975 /* We store the frame pointer and the address of receiver_label in
976 the buffer and use the rest of it for the stack save area, which
977 is machine-dependent. */
53800dbe 978
f7c44134 979 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 980 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 981 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 982
29c05e22 983 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
984 GET_MODE_SIZE (Pmode))),
ab6ab77e 985 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 986
987 emit_move_insn (validize_mem (mem),
6b7f6858 988 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 989
990 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 991 plus_constant (Pmode, buf_addr,
53800dbe 992 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 993 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 994 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 995
996 /* If there is further processing to do, do it. */
a3c81e61 997 if (targetm.have_builtin_setjmp_setup ())
998 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 999
29f09705 1000 /* We have a nonlocal label. */
18d50ae6 1001 cfun->has_nonlocal_label = 1;
6b7f6858 1002}
53800dbe 1003
2c8a1497 1004/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 1005 also called directly by the SJLJ exception handling code.
1006 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
6b7f6858 1007
1008void
a3c81e61 1009expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 1010{
82c7907c 1011 rtx chain;
1012
4598ade9 1013 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 1014 marked as used by this function. */
18b42941 1015 emit_use (hard_frame_pointer_rtx);
53800dbe 1016
1017 /* Mark the static chain as clobbered here so life information
1018 doesn't get messed up for it. */
3c56e0c1 1019 chain = rtx_for_static_chain (current_function_decl, true);
82c7907c 1020 if (chain && REG_P (chain))
1021 emit_clobber (chain);
53800dbe 1022
1023 /* Now put in the code to restore the frame pointer, and argument
491e04ef 1024 pointer, if needed. */
a3c81e61 1025 if (! targetm.have_nonlocal_goto ())
62dcb5c8 1026 {
1027 /* First adjust our frame pointer to its actual value. It was
1028 previously set to the start of the virtual area corresponding to
1029 the stacked variables when we branched here and now needs to be
1030 adjusted to the actual hardware fp value.
1031
1032 Assignments to virtual registers are converted by
1033 instantiate_virtual_regs into the corresponding assignment
1034 to the underlying register (fp in this case) that makes
1035 the original assignment true.
1036 So the following insn will actually be decrementing fp by
8374586c 1037 TARGET_STARTING_FRAME_OFFSET. */
62dcb5c8 1038 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1039
1040 /* Restoring the frame pointer also modifies the hard frame pointer.
1041 Mark it used (so that the previous assignment remains live once
1042 the frame pointer is eliminated) and clobbered (to represent the
1043 implicit update from the assignment). */
1044 emit_use (hard_frame_pointer_rtx);
1045 emit_clobber (hard_frame_pointer_rtx);
1046 }
53800dbe 1047
a494b6d7 1048 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
53800dbe 1049 {
4598ade9 1050 /* If the argument pointer can be eliminated in favor of the
1051 frame pointer, we don't need to restore it. We assume here
1052 that if such an elimination is present, it can always be used.
1053 This is the case on all known machines; if we don't make this
1054 assumption, we do unnecessary saving on many machines. */
53800dbe 1055 size_t i;
e99c3a1d 1056 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 1057
3098b2d3 1058 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 1059 if (elim_regs[i].from == ARG_POINTER_REGNUM
1060 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1061 break;
1062
3098b2d3 1063 if (i == ARRAY_SIZE (elim_regs))
53800dbe 1064 {
1065 /* Now restore our arg pointer from the address at which it
05927e40 1066 was saved in our stack frame. */
27a7a23a 1067 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 1068 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 1069 }
1070 }
53800dbe 1071
a3c81e61 1072 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1073 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1074 else if (targetm.have_nonlocal_goto_receiver ())
1075 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 1076 else
a3c81e61 1077 { /* Nothing */ }
57f6bb94 1078
3072d30e 1079 /* We must not allow the code we just generated to be reordered by
1080 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 1081 happen immediately, not later. */
3072d30e 1082 emit_insn (gen_blockage ());
6b7f6858 1083}
53800dbe 1084
53800dbe 1085/* __builtin_longjmp is passed a pointer to an array of five words (not
1086 all will be used on all machines). It operates similarly to the C
1087 library function of the same name, but is more efficient. Much of
2c8a1497 1088 the code below is copied from the handling of non-local gotos. */
53800dbe 1089
c626df3d 1090static void
aecda0d6 1091expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 1092{
1e0c0b35 1093 rtx fp, lab, stack;
1094 rtx_insn *insn, *last;
3754d046 1095 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 1096
48e1416a 1097 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 1098 function */
1099 if (SUPPORTS_STACK_ALIGNMENT)
1100 crtl->need_drap = true;
1101
f7c44134 1102 if (setjmp_alias_set == -1)
1103 setjmp_alias_set = new_alias_set ();
1104
85d654dd 1105 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 1106
53800dbe 1107 buf_addr = force_reg (Pmode, buf_addr);
1108
82c7907c 1109 /* We require that the user must pass a second argument of 1, because
1110 that is what builtin_setjmp will return. */
64db345d 1111 gcc_assert (value == const1_rtx);
53800dbe 1112
4712c7d6 1113 last = get_last_insn ();
a3c81e61 1114 if (targetm.have_builtin_longjmp ())
1115 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1116 else
53800dbe 1117 {
1118 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1119 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1120 GET_MODE_SIZE (Pmode)));
1121
29c05e22 1122 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1123 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1124 set_mem_alias_set (fp, setjmp_alias_set);
1125 set_mem_alias_set (lab, setjmp_alias_set);
1126 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1127
1128 /* Pick up FP, label, and SP from the block and jump. This code is
1129 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 1130 if (targetm.have_nonlocal_goto ())
53800dbe 1131 /* We have to pass a value to the nonlocal_goto pattern that will
1132 get copied into the static_chain pointer, but it does not matter
1133 what that value is, because builtin_setjmp does not use it. */
a3c81e61 1134 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1135 else
53800dbe 1136 {
1137 lab = copy_to_reg (lab);
1138
18b42941 1139 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1140 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1141
53800dbe 1142 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1143 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1144
18b42941 1145 emit_use (hard_frame_pointer_rtx);
1146 emit_use (stack_pointer_rtx);
53800dbe 1147 emit_indirect_jump (lab);
1148 }
1149 }
615166bb 1150
1151 /* Search backwards and mark the jump insn as a non-local goto.
1152 Note that this precludes the use of __builtin_longjmp to a
1153 __builtin_setjmp target in the same function. However, we've
1154 already cautioned the user that these functions are for
1155 internal exception handling use only. */
449c0509 1156 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1157 {
64db345d 1158 gcc_assert (insn != last);
7d3f6cc7 1159
6d7dc5b9 1160 if (JUMP_P (insn))
449c0509 1161 {
a1ddb869 1162 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1163 break;
1164 }
6d7dc5b9 1165 else if (CALL_P (insn))
9342ee68 1166 break;
449c0509 1167 }
53800dbe 1168}
1169
0e80b01d 1170static inline bool
1171more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1172{
1173 return (iter->i < iter->n);
1174}
1175
1176/* This function validates the types of a function call argument list
1177 against a specified list of tree_codes. If the last specifier is a 0,
5cfa3fc8 1178 that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1179 VOID_TYPE. */
1180
1181static bool
1182validate_arglist (const_tree callexpr, ...)
1183{
1184 enum tree_code code;
1185 bool res = 0;
1186 va_list ap;
1187 const_call_expr_arg_iterator iter;
1188 const_tree arg;
1189
1190 va_start (ap, callexpr);
1191 init_const_call_expr_arg_iterator (callexpr, &iter);
1192
5cfa3fc8 1193 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
184fac50 1194 tree fn = CALL_EXPR_FN (callexpr);
1195 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1196
1197 for (unsigned argno = 1; ; ++argno)
0e80b01d 1198 {
1199 code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1200
0e80b01d 1201 switch (code)
1202 {
1203 case 0:
1204 /* This signifies an ellipses, any further arguments are all ok. */
1205 res = true;
1206 goto end;
1207 case VOID_TYPE:
1208 /* This signifies an endlink, if no arguments remain, return
1209 true, otherwise return false. */
1210 res = !more_const_call_expr_args_p (&iter);
1211 goto end;
5cfa3fc8 1212 case POINTER_TYPE:
1213 /* The actual argument must be nonnull when either the whole
1214 called function has been declared nonnull, or when the formal
1215 argument corresponding to the actual argument has been. */
184fac50 1216 if (argmap
1217 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1218 {
1219 arg = next_const_call_expr_arg (&iter);
1220 if (!validate_arg (arg, code) || integer_zerop (arg))
1221 goto end;
1222 break;
1223 }
5cfa3fc8 1224 /* FALLTHRU */
0e80b01d 1225 default:
1226 /* If no parameters remain or the parameter's code does not
1227 match the specified code, return false. Otherwise continue
1228 checking any remaining arguments. */
1229 arg = next_const_call_expr_arg (&iter);
184fac50 1230 if (!validate_arg (arg, code))
0e80b01d 1231 goto end;
1232 break;
1233 }
1234 }
0e80b01d 1235
1236 /* We need gotos here since we can only have one VA_CLOSE in a
1237 function. */
1238 end: ;
1239 va_end (ap);
1240
5cfa3fc8 1241 BITMAP_FREE (argmap);
1242
0e80b01d 1243 return res;
1244}
1245
4ee9c684 1246/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1247 and the address of the save area. */
1248
1249static rtx
c2f47e15 1250expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1251{
1252 tree t_label, t_save_area;
1e0c0b35 1253 rtx r_label, r_save_area, r_fp, r_sp;
1254 rtx_insn *insn;
4ee9c684 1255
c2f47e15 1256 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1257 return NULL_RTX;
1258
c2f47e15 1259 t_label = CALL_EXPR_ARG (exp, 0);
1260 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1261
8ec3c5c2 1262 r_label = expand_normal (t_label);
3dce56cc 1263 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1264 r_save_area = expand_normal (t_save_area);
3dce56cc 1265 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1266 /* Copy the address of the save location to a register just in case it was
1267 based on the frame pointer. */
51adbc8a 1268 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1269 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1270 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1271 plus_constant (Pmode, r_save_area,
1272 GET_MODE_SIZE (Pmode)));
4ee9c684 1273
18d50ae6 1274 crtl->has_nonlocal_goto = 1;
4ee9c684 1275
4ee9c684 1276 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1277 if (targetm.have_nonlocal_goto ())
1278 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1279 else
4ee9c684 1280 {
1281 r_label = copy_to_reg (r_label);
1282
18b42941 1283 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1284 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1285
d1ff492e 1286 /* Restore frame pointer for containing function. */
4ee9c684 1287 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1288 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1289
4ee9c684 1290 /* USE of hard_frame_pointer_rtx added for consistency;
1291 not clear if really needed. */
18b42941 1292 emit_use (hard_frame_pointer_rtx);
1293 emit_use (stack_pointer_rtx);
ad0d0af8 1294
1295 /* If the architecture is using a GP register, we must
1296 conservatively assume that the target function makes use of it.
1297 The prologue of functions with nonlocal gotos must therefore
1298 initialize the GP register to the appropriate value, and we
1299 must then make sure that this value is live at the point
1300 of the jump. (Note that this doesn't necessarily apply
1301 to targets with a nonlocal_goto pattern; they are free
1302 to implement it in their own way. Note also that this is
1303 a no-op if the GP register is a global invariant.) */
1e826931 1304 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1305 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
18b42941 1306 emit_use (pic_offset_table_rtx);
ad0d0af8 1307
4ee9c684 1308 emit_indirect_jump (r_label);
1309 }
491e04ef 1310
4ee9c684 1311 /* Search backwards to the jump insn and mark it as a
1312 non-local goto. */
1313 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1314 {
6d7dc5b9 1315 if (JUMP_P (insn))
4ee9c684 1316 {
a1ddb869 1317 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1318 break;
1319 }
6d7dc5b9 1320 else if (CALL_P (insn))
4ee9c684 1321 break;
1322 }
1323
1324 return const0_rtx;
1325}
1326
843d08a9 1327/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1328 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1329 It updates the stack pointer in that block to the current value. This is
1330 also called directly by the SJLJ exception handling code. */
843d08a9 1331
97354ae4 1332void
843d08a9 1333expand_builtin_update_setjmp_buf (rtx buf_addr)
1334{
3754d046 1335 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
77e843a0 1336 buf_addr = convert_memory_address (Pmode, buf_addr);
d1ff492e 1337 rtx stack_save
843d08a9 1338 = gen_rtx_MEM (sa_mode,
1339 memory_address
1340 (sa_mode,
29c05e22 1341 plus_constant (Pmode, buf_addr,
1342 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1343
e9c97615 1344 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1345}
1346
5e3608d8 1347/* Expand a call to __builtin_prefetch. For a target that does not support
1348 data prefetch, evaluate the memory address argument in case it has side
1349 effects. */
1350
1351static void
c2f47e15 1352expand_builtin_prefetch (tree exp)
5e3608d8 1353{
1354 tree arg0, arg1, arg2;
c2f47e15 1355 int nargs;
5e3608d8 1356 rtx op0, op1, op2;
1357
c2f47e15 1358 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1359 return;
1360
c2f47e15 1361 arg0 = CALL_EXPR_ARG (exp, 0);
1362
26a5cadb 1363 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1364 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1365 locality). */
c2f47e15 1366 nargs = call_expr_nargs (exp);
1367 if (nargs > 1)
1368 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1369 else
c2f47e15 1370 arg1 = integer_zero_node;
1371 if (nargs > 2)
1372 arg2 = CALL_EXPR_ARG (exp, 2);
1373 else
2512209b 1374 arg2 = integer_three_node;
5e3608d8 1375
1376 /* Argument 0 is an address. */
1377 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1378
1379 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1380 if (TREE_CODE (arg1) != INTEGER_CST)
1381 {
07e3a3d2 1382 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1383 arg1 = integer_zero_node;
5e3608d8 1384 }
8ec3c5c2 1385 op1 = expand_normal (arg1);
5e3608d8 1386 /* Argument 1 must be either zero or one. */
1387 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1388 {
c3ceba8e 1389 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1390 " using zero");
5e3608d8 1391 op1 = const0_rtx;
1392 }
1393
1394 /* Argument 2 (locality) must be a compile-time constant int. */
1395 if (TREE_CODE (arg2) != INTEGER_CST)
1396 {
07e3a3d2 1397 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1398 arg2 = integer_zero_node;
1399 }
8ec3c5c2 1400 op2 = expand_normal (arg2);
5e3608d8 1401 /* Argument 2 must be 0, 1, 2, or 3. */
1402 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1403 {
c3ceba8e 1404 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1405 op2 = const0_rtx;
1406 }
1407
1d375a79 1408 if (targetm.have_prefetch ())
5e3608d8 1409 {
8786db1e 1410 struct expand_operand ops[3];
1411
1412 create_address_operand (&ops[0], op0);
1413 create_integer_operand (&ops[1], INTVAL (op1));
1414 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1415 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1416 return;
5e3608d8 1417 }
0a534ba7 1418
f0ce3b1f 1419 /* Don't do anything with direct references to volatile memory, but
1420 generate code to handle other side effects. */
e16ceb8e 1421 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1422 emit_insn (op0);
5e3608d8 1423}
1424
f7c44134 1425/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1426 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1427 the maximum length of the block of memory that might be accessed or
1428 NULL if unknown. */
f7c44134 1429
53800dbe 1430static rtx
d8ae1baa 1431get_memory_rtx (tree exp, tree len)
53800dbe 1432{
ad0a178f 1433 tree orig_exp = exp;
1434 rtx addr, mem;
ad0a178f 1435
1436 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1437 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1438 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1439 exp = TREE_OPERAND (exp, 0);
1440
1441 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1442 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1443
f7c44134 1444 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1445 First remove any nops. */
72dd6141 1446 while (CONVERT_EXPR_P (exp)
f7c44134 1447 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1448 exp = TREE_OPERAND (exp, 0);
1449
5dd3f78f 1450 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1451 (as builtin stringops may alias with anything). */
1452 exp = fold_build2 (MEM_REF,
1453 build_array_type (char_type_node,
1454 build_range_type (sizetype,
1455 size_one_node, len)),
1456 exp, build_int_cst (ptr_type_node, 0));
1457
1458 /* If the MEM_REF has no acceptable address, try to get the base object
1459 from the original address we got, and build an all-aliasing
1460 unknown-sized access to that one. */
1461 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1462 set_mem_attributes (mem, exp, 0);
1463 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1464 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1465 0))))
eec8e941 1466 {
5dd3f78f 1467 exp = build_fold_addr_expr (exp);
1468 exp = fold_build2 (MEM_REF,
1469 build_array_type (char_type_node,
1470 build_range_type (sizetype,
1471 size_zero_node,
1472 NULL)),
1473 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1474 set_mem_attributes (mem, exp, 0);
eec8e941 1475 }
5dd3f78f 1476 set_mem_alias_set (mem, 0);
53800dbe 1477 return mem;
1478}
1479\f
/* Built-in functions to perform an untyped call and return.  */

/* Per-target cached register modes for the __builtin_apply_args /
   __builtin_apply save blocks; filled in by apply_args_size and
   apply_result_size below.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
53800dbe 1486
53800dbe 1487/* Return the size required for the block returned by __builtin_apply_args,
1488 and initialize apply_args_mode. */
1489
1490static int
aecda0d6 1491apply_args_size (void)
53800dbe 1492{
1493 static int size = -1;
58e9ce8f 1494 int align;
1495 unsigned int regno;
53800dbe 1496
1497 /* The values computed by this function never change. */
1498 if (size < 0)
1499 {
1500 /* The first value is the incoming arg-pointer. */
1501 size = GET_MODE_SIZE (Pmode);
1502
1503 /* The second value is the structure value address unless this is
1504 passed as an "invisible" first argument. */
6812c89e 1505 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1506 size += GET_MODE_SIZE (Pmode);
1507
1508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1509 if (FUNCTION_ARG_REGNO_P (regno))
1510 {
d8ba6ec1 1511 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1512
64db345d 1513 gcc_assert (mode != VOIDmode);
53800dbe 1514
1515 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1516 if (size % align != 0)
1517 size = CEIL (size, align) * align;
53800dbe 1518 size += GET_MODE_SIZE (mode);
1519 apply_args_mode[regno] = mode;
1520 }
1521 else
1522 {
d8ba6ec1 1523 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
53800dbe 1524 }
1525 }
1526 return size;
1527}
1528
1529/* Return the size required for the block returned by __builtin_apply,
1530 and initialize apply_result_mode. */
1531
1532static int
aecda0d6 1533apply_result_size (void)
53800dbe 1534{
1535 static int size = -1;
1536 int align, regno;
53800dbe 1537
1538 /* The values computed by this function never change. */
1539 if (size < 0)
1540 {
1541 size = 0;
1542
1543 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1544 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1545 {
d8ba6ec1 1546 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1547
64db345d 1548 gcc_assert (mode != VOIDmode);
53800dbe 1549
1550 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1551 if (size % align != 0)
1552 size = CEIL (size, align) * align;
1553 size += GET_MODE_SIZE (mode);
1554 apply_result_mode[regno] = mode;
1555 }
1556 else
d8ba6ec1 1557 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
53800dbe 1558
1559 /* Allow targets that use untyped_call and untyped_return to override
1560 the size so that machine-specific information can be stored here. */
1561#ifdef APPLY_RESULT_SIZE
1562 size = APPLY_RESULT_SIZE;
1563#endif
1564 }
1565 return size;
1566}
1567
53800dbe 1568/* Create a vector describing the result block RESULT. If SAVEP is true,
1569 the result block is used to save the values; otherwise it is used to
1570 restore the values. */
1571
1572static rtx
aecda0d6 1573result_vector (int savep, rtx result)
53800dbe 1574{
1575 int regno, size, align, nelts;
d8ba6ec1 1576 fixed_size_mode mode;
53800dbe 1577 rtx reg, mem;
364c0c59 1578 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1579
53800dbe 1580 size = nelts = 0;
1581 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1582 if ((mode = apply_result_mode[regno]) != VOIDmode)
1583 {
1584 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1585 if (size % align != 0)
1586 size = CEIL (size, align) * align;
1587 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1588 mem = adjust_address (result, mode, size);
53800dbe 1589 savevec[nelts++] = (savep
d1f9b275 1590 ? gen_rtx_SET (mem, reg)
1591 : gen_rtx_SET (reg, mem));
53800dbe 1592 size += GET_MODE_SIZE (mode);
1593 }
1594 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1595}
53800dbe 1596
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a pseudo
   holding the address of the save block.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Slot layout must mirror apply_args_size: align each slot to
	   its mode before storing.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1657
1658/* __builtin_apply_args returns block of memory allocated on
1659 the stack into which is stored the arg pointer, structure
1660 value address, static chain, and all the registers that might
1661 possibly be used in performing a function call. The code is
1662 moved to the start of the function so the incoming values are
1663 saved. */
27d0c333 1664
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Emit the register-save code into a detached sequence so it can
       be moved to the function entry as a unit.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1701
1702/* Perform an untyped call and save the state required to perform an
1703 untyped return of whatever value was returned by the given function. */
1704
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  /* FUNCTION is the address of the callee, ARGUMENTS the address of a
     block laid out by expand_builtin_apply_args_1, and ARGSIZE the
     number of bytes of stack arguments to copy.  */
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.
     This must mirror the layout written by expand_builtin_apply_args_1.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1863
1864/* Perform an untyped return. */
1865
static void
expand_builtin_return (rtx result)
{
  /* RESULT is the address of a block in the layout produced by
     expand_builtin_apply: the saved return registers, each aligned to
     its mode's alignment.  */
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target has a dedicated untyped_return pattern, let it do
     the whole job and terminate the insn stream with a barrier.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Collect a USE of each restored register in a side sequence so
	   the uses can all be emitted just before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1912
539a3a92 1913/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1914
539a3a92 1915static enum type_class
aecda0d6 1916type_to_class (tree type)
539a3a92 1917{
1918 switch (TREE_CODE (type))
1919 {
1920 case VOID_TYPE: return void_type_class;
1921 case INTEGER_TYPE: return integer_type_class;
539a3a92 1922 case ENUMERAL_TYPE: return enumeral_type_class;
1923 case BOOLEAN_TYPE: return boolean_type_class;
1924 case POINTER_TYPE: return pointer_type_class;
1925 case REFERENCE_TYPE: return reference_type_class;
1926 case OFFSET_TYPE: return offset_type_class;
1927 case REAL_TYPE: return real_type_class;
1928 case COMPLEX_TYPE: return complex_type_class;
1929 case FUNCTION_TYPE: return function_type_class;
1930 case METHOD_TYPE: return method_type_class;
1931 case RECORD_TYPE: return record_type_class;
1932 case UNION_TYPE:
1933 case QUAL_UNION_TYPE: return union_type_class;
1934 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1935 ? string_type_class : array_type_class);
539a3a92 1936 case LANG_TYPE: return lang_type_class;
1937 default: return no_type_class;
1938 }
1939}
bf8e3599 1940
c2f47e15 1941/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1942
53800dbe 1943static rtx
c2f47e15 1944expand_builtin_classify_type (tree exp)
53800dbe 1945{
c2f47e15 1946 if (call_expr_nargs (exp))
1947 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1948 return GEN_INT (no_type_class);
1949}
1950
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.

   Each macro expands to a case label (via CASE_CFN_*) plus assignments
   into the fcode* locals declared by the enclosing function, so they may
   only be used inside a switch over a combined_fn.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  These
   reentrant variants have no CASE_CFN_* macro, so the three case
   labels are spelled out explicitly.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1978
6c21be92 1979/* Return a function equivalent to FN but operating on floating-point
1980 values of type TYPE, or END_BUILTINS if no such function exists.
e3240774 1981 This is purely an operation on function codes; it does not guarantee
1982 that the target actually has an implementation of the function. */
c319d56a 1983
static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* The _FloatN / _FloatNX codes stay END_BUILTINS unless FN hits a
     CASE_MATHFN_FLOATN entry below, so functions without _FloatN
     variants naturally return END_BUILTINS for those types.  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* Each CASE_MATHFN* macro fills in the per-type function codes for
     one math function family; see the macro definitions above.  */
  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the function code matching TYPE's main variant; unknown
     floating-point types fall through to END_BUILTINS.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
2115
2116/* Return mathematic function equivalent to FN but operating directly on TYPE,
2117 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2118 otherwise use the explicit declaration. If we can't do the conversion,
2119 return null. */
2120
2121static tree
e3240774 2122mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 2123{
2124 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2125 if (fcode2 == END_BUILTINS)
c2f47e15 2126 return NULL_TREE;
b9a16870 2127
2128 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2129 return NULL_TREE;
2130
2131 return builtin_decl_explicit (fcode2);
0a68165a 2132}
2133
e3240774 2134/* Like mathfn_built_in_1, but always use the implicit array. */
c319d56a 2135
2136tree
e3240774 2137mathfn_built_in (tree type, combined_fn fn)
c319d56a 2138{
2139 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2140}
2141
e3240774 2142/* Like mathfn_built_in_1, but take a built_in_function and
2143 always use the implicit array. */
2144
2145tree
2146mathfn_built_in (tree type, enum built_in_function fn)
2147{
2148 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2149}
2150
1f24b8e9 2151/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2152 return its code, otherwise return IFN_LAST. Note that this function
2153 only tests whether the function is defined in internals.def, not whether
2154 it is actually available on the target. */
2155
internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* Generate one case per internal function declared in
       internal-fn.def whose name matches a builtin family.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* Builtins whose internal function goes by a different name.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is only ldexp when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2188
2189/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2190 on the current target by a call to an internal function, return the
2191 code of that internal function, otherwise return IFN_LAST. The caller
2192 is responsible for ensuring that any side-effects of the built-in
2193 call are dealt with correctly. E.g. if CALL sets errno, the caller
2194 must decide that the errno result isn't needed or make it available
2195 in some other way. */
2196
2197internal_fn
2198replacement_internal_fn (gcall *call)
2199{
2200 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2201 {
2202 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2203 if (ifn != IFN_LAST)
2204 {
2205 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2206 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2207 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2208 return ifn;
2209 }
2210 }
2211 return IFN_LAST;
2212}
2213
7e0713b1 2214/* Expand a call to the builtin trinary math functions (fma).
2215 Return NULL_RTX if a normal call should be emitted rather than expanding the
2216 function in-line. EXP is the expression that is a call to the builtin
2217 function; if convenient, the result should be placed in TARGET.
2218 SUBTARGET may be used as the target for computing one of EXP's
2219 operands. */
2220
static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  Wrapping the arguments in
     SAVE_EXPRs means the fallback library call below re-expands them
     without duplicating side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2288
6b43bae4 2289/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2290 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2291 function in-line. EXP is the expression that is a call to the builtin
2292 function; if convenient, the result should be placed in TARGET.
2293 SUBTARGET may be used as the target for computing one of EXP's
2294 operands. */
2295
static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; request only the one we need
	     and discard the other (a zero output operand).  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2389
a65c4d64 2390/* Given an interclass math builtin decl FNDECL and it's argument ARG
2391 return an RTL instruction code that implements the functionality.
2392 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2393
static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.
     Give up on inline expansion when errno semantics are required.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2433
2434/* Expand a call to one of the builtin math functions that operate on
12f08300 2435 floating point argument and output an integer result (ilogb, isinf,
2436 isnan, etc).
a65c4d64 2437 Return 0 if a normal call should be emitted rather than expanding the
2438 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2439 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2440
static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Keep the unstabilized argument so it can be restored if the
	 insn emission below is abandoned.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Emission failed: discard the partially-emitted insns and undo
	 the argument stabilization before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2484
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function sincos (ARG, *SINP, *COSP).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos takes a floating-point value and two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the two output locations so the stores below get
     correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void, so the expansion's value is a dummy.  */
  return const0_rtx;
}
2538
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Three strategies are tried in
   order: the sincos optab, a libcall to sincos, and a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* sincos writes through pointers; create two stack temporaries
	 and pass their addresses.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i), so build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos(arg) + sin(arg)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2647
a65c4d64 2648/* Conveniently construct a function call expression. FNDECL names the
2649 function to be called, N is the number of arguments, and the "..."
2650 parameters are the argument expressions. Unlike build_call_exr
2651 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2652
2653static tree
2654build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2655{
2656 va_list ap;
2657 tree fntype = TREE_TYPE (fndecl);
2658 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2659
2660 va_start (ap, n);
2661 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2662 va_end (ap);
2663 SET_EXPR_LOCATION (fn, loc);
2664 return fn;
2665}
a65c4d64 2666
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the combined float->integer optab and the plain
     floating-point rounding builtin used as a fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the builtin code to the libm function name of matching
	 floating-point precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2804
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  /* BUILT_IN_NONE means there is no long-int fallback function.  */
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* The i* variants deliberately fall through to the l*/ll* cases:
     they share the optab but also record an lround/lrint fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the requested mode.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2906
c2f47e15 2907/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2911
2912static rtx
f97eea22 2913expand_builtin_powi (tree exp, rtx target)
757c219d 2914{
757c219d 2915 tree arg0, arg1;
2916 rtx op0, op1;
3754d046 2917 machine_mode mode;
2918 machine_mode mode2;
757c219d 2919
c2f47e15 2920 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2921 return NULL_RTX;
757c219d 2922
c2f47e15 2923 arg0 = CALL_EXPR_ARG (exp, 0);
2924 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2925 mode = TYPE_MODE (TREE_TYPE (exp));
2926
757c219d 2927 /* Emit a libcall to libgcc. */
2928
c2f47e15 2929 /* Mode of the 2nd argument must match that of an int. */
517be012 2930 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
d0405f40 2931
757c219d 2932 if (target == NULL_RTX)
2933 target = gen_reg_rtx (mode);
2934
f97eea22 2935 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2936 if (GET_MODE (op0) != mode)
2937 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2938 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2939 if (GET_MODE (op1) != mode2)
2940 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2941
f36b9f69 2942 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
9e9e5c15 2943 target, LCT_CONST, mode,
d0405f40 2944 op0, mode, op1, mode2);
757c219d 2945
2946 return target;
2947}
2948
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient, in mode TARGET_MODE.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-address computation in front of the strlen insn
     recorded above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3054
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.
   Also diagnoses bounds that exceed the maximum object size or the
   size of an unterminated source array.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data data;
  memset (&data, 0, sizeof (c_strlen_data));
  tree len = c_strlen (src, 0, &data, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* Case 1: the bound is a known constant.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater. */
	  data.decl = unterminated_array (src, &len, &exact);
	  if (!data.decl)
	    return NULL_RTX;
	}

      if (data.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (data.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      /* strnlen (s, n) == min (strlen (s), n).  */
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  /* Case 2: the bound is an SSA name with known value range.  */
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_type rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  if (!len || TREE_CODE (len) != INTEGER_CST)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (len), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds the size %E of unterminated array",
		     exp, func, min.to_uhwi (), max.to_uhwi (), len))
    {
      inform (DECL_SOURCE_LOCATION (data.decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (exp) = true;
    }

  /* If even the smallest possible bound exceeds the string length the
     result is the constant length.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3175
6840589f 3176/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3177 bytes from constant string DATA + OFFSET and return it as target
3178 constant. */
3179
3180static rtx
aecda0d6 3181builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 3182 scalar_int_mode mode)
6840589f 3183{
3184 const char *str = (const char *) data;
3185
64db345d 3186 gcc_assert (offset >= 0
3187 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3188 <= strlen (str) + 1));
6840589f 3189
3190 return c_readstr (str + offset, mode);
3191}
3192
/* LEN specifies the length of the block of a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  LEN_RTX is LEN already expanded to RTL.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length fixes all three outputs.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA value-range info.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3259
5aef8938 3260/* Try to verify that the sizes and lengths of the arguments to a string
3261 manipulation function given by EXP are within valid bounds and that
e6a18b5a 3262 the operation does not lead to buffer overflow or read past the end.
3263 Arguments other than EXP may be null. When non-null, the arguments
3264 have the following meaning:
3265 DST is the destination of a copy call or NULL otherwise.
3266 SRC is the source of a copy call or NULL otherwise.
3267 DSTWRITE is the number of bytes written into the destination obtained
3268 from the user-supplied size argument to the function (such as in
3269 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3270 MAXREAD is the user-supplied bound on the length of the source sequence
5aef8938 3271 (such as in strncat(d, s, N). It specifies the upper limit on the number
e6a18b5a 3272 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3273 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3274 expression EXP is a string function call (as opposed to a memory call
3275 like memcpy). As an exception, SRCSTR can also be an integer denoting
3276 the precomputed size of the source string or object (for functions like
3277 memcpy).
3278 DSTSIZE is the size of the destination object specified by the last
5aef8938 3279 argument to the _chk builtins, typically resulting from the expansion
e6a18b5a 3280 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3281 DSTSIZE).
5aef8938 3282
e6a18b5a 3283 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
5aef8938 3284 SIZE_MAX.
3285
e6a18b5a 3286 If the call is successfully verified as safe return true, otherwise
3287 return false. */
5aef8938 3288
3289static bool
e6a18b5a 3290check_access (tree exp, tree, tree, tree dstwrite,
3291 tree maxread, tree srcstr, tree dstsize)
5aef8938 3292{
e6a18b5a 3293 int opt = OPT_Wstringop_overflow_;
3294
5aef8938 3295 /* The size of the largest object is half the address space, or
e6a18b5a 3296 PTRDIFF_MAX. (This is way too permissive.) */
3297 tree maxobjsize = max_object_size ();
5aef8938 3298
e6a18b5a 3299 /* Either the length of the source string for string functions or
3300 the size of the source object for raw memory functions. */
5aef8938 3301 tree slen = NULL_TREE;
3302
8d6c6ef5 3303 tree range[2] = { NULL_TREE, NULL_TREE };
3304
5aef8938 3305 /* Set to true when the exact number of bytes written by a string
3306 function like strcpy is not known and the only thing that is
3307 known is that it must be at least one (for the terminating nul). */
3308 bool at_least_one = false;
e6a18b5a 3309 if (srcstr)
5aef8938 3310 {
e6a18b5a 3311 /* SRCSTR is normally a pointer to string but as a special case
5aef8938 3312 it can be an integer denoting the length of a string. */
e6a18b5a 3313 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
5aef8938 3314 {
3315 /* Try to determine the range of lengths the source string
8d6c6ef5 3316 refers to. If it can be determined and is less than
e6a18b5a 3317 the upper bound given by MAXREAD add one to it for
5aef8938 3318 the terminating nul. Otherwise, set it to one for
e6a18b5a 3319 the same reason, or to MAXREAD as appropriate. */
3320 get_range_strlen (srcstr, range);
3321 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
8d6c6ef5 3322 {
e6a18b5a 3323 if (maxread && tree_int_cst_le (maxread, range[0]))
3324 range[0] = range[1] = maxread;
8d6c6ef5 3325 else
3326 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3327 range[0], size_one_node);
3328
e6a18b5a 3329 if (maxread && tree_int_cst_le (maxread, range[1]))
3330 range[1] = maxread;
8d6c6ef5 3331 else if (!integer_all_onesp (range[1]))
3332 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3333 range[1], size_one_node);
3334
3335 slen = range[0];
3336 }
5aef8938 3337 else
3338 {
3339 at_least_one = true;
3340 slen = size_one_node;
3341 }
3342 }
3343 else
e6a18b5a 3344 slen = srcstr;
5aef8938 3345 }
3346
e6a18b5a 3347 if (!dstwrite && !maxread)
5aef8938 3348 {
3349 /* When the only available piece of data is the object size
3350 there is nothing to do. */
3351 if (!slen)
3352 return true;
3353
3354 /* Otherwise, when the length of the source sequence is known
e6a18b5a 3355 (as with strlen), set DSTWRITE to it. */
8d6c6ef5 3356 if (!range[0])
e6a18b5a 3357 dstwrite = slen;
5aef8938 3358 }
3359
e6a18b5a 3360 if (!dstsize)
3361 dstsize = maxobjsize;
5aef8938 3362
e6a18b5a 3363 if (dstwrite)
3364 get_size_range (dstwrite, range);
5aef8938 3365
e6a18b5a 3366 tree func = get_callee_fndecl (exp);
5aef8938 3367
3368 /* First check the number of bytes to be written against the maximum
3369 object size. */
c4183f31 3370 if (range[0]
3371 && TREE_CODE (range[0]) == INTEGER_CST
3372 && tree_int_cst_lt (maxobjsize, range[0]))
5aef8938 3373 {
864bd5de 3374 if (TREE_NO_WARNING (exp))
3375 return false;
3376
5aef8938 3377 location_t loc = tree_nonartificial_location (exp);
4d317237 3378 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3379
864bd5de 3380 bool warned;
5aef8938 3381 if (range[0] == range[1])
864bd5de 3382 warned = warning_at (loc, opt,
3383 "%K%qD specified size %E "
3384 "exceeds maximum object size %E",
3385 exp, func, range[0], maxobjsize);
3386 else
3387 warned = warning_at (loc, opt,
3388 "%K%qD specified size between %E and %E "
3389 "exceeds maximum object size %E",
3390 exp, func,
3391 range[0], range[1], maxobjsize);
3392 if (warned)
3393 TREE_NO_WARNING (exp) = true;
3394
5aef8938 3395 return false;
3396 }
3397
e6a18b5a 3398 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3399 constant, and in range of unsigned HOST_WIDE_INT. */
3400 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3401
5aef8938 3402 /* Next check the number of bytes to be written against the destination
3403 object size. */
e6a18b5a 3404 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
5aef8938 3405 {
3406 if (range[0]
c4183f31 3407 && TREE_CODE (range[0]) == INTEGER_CST
e6a18b5a 3408 && ((tree_fits_uhwi_p (dstsize)
3409 && tree_int_cst_lt (dstsize, range[0]))
c4183f31 3410 || (dstwrite
3411 && tree_fits_uhwi_p (dstwrite)
e6a18b5a 3412 && tree_int_cst_lt (dstwrite, range[0]))))
5aef8938 3413 {
080a1363 3414 if (TREE_NO_WARNING (exp))
3415 return false;
3416
5aef8938 3417 location_t loc = tree_nonartificial_location (exp);
4d317237 3418 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3419
e6a18b5a 3420 if (dstwrite == slen && at_least_one)
8d6c6ef5 3421 {
3422 /* This is a call to strcpy with a destination of 0 size
3423 and a source of unknown length. The call will write
3424 at least one byte past the end of the destination. */
3425 warning_at (loc, opt,
9098b938 3426 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3427 "of size %E overflows the destination",
e6a18b5a 3428 exp, func, range[0], dstsize);
8d6c6ef5 3429 }
3430 else if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3431 warning_n (loc, opt, tree_to_uhwi (range[0]),
3432 "%K%qD writing %E byte into a region "
3433 "of size %E overflows the destination",
3434 "%K%qD writing %E bytes into a region "
3435 "of size %E overflows the destination",
3436 exp, func, range[0], dstsize);
8d6c6ef5 3437 else if (tree_int_cst_sign_bit (range[1]))
3438 {
3439 /* Avoid printing the upper bound if it's invalid. */
3440 warning_at (loc, opt,
9098b938 3441 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3442 "of size %E overflows the destination",
e6a18b5a 3443 exp, func, range[0], dstsize);
8d6c6ef5 3444 }
5aef8938 3445 else
3446 warning_at (loc, opt,
9098b938 3447 "%K%qD writing between %E and %E bytes into "
8d6c6ef5 3448 "a region of size %E overflows the destination",
e6a18b5a 3449 exp, func, range[0], range[1],
3450 dstsize);
5aef8938 3451
3452 /* Return error when an overflow has been detected. */
3453 return false;
3454 }
3455 }
3456
3457 /* Check the maximum length of the source sequence against the size
3458 of the destination object if known, or against the maximum size
3459 of an object. */
e6a18b5a 3460 if (maxread)
5aef8938 3461 {
e6a18b5a 3462 get_size_range (maxread, range);
3463
3464 /* Use the lower end for MAXREAD from now on. */
3465 if (range[0])
3466 maxread = range[0];
5aef8938 3467
e6a18b5a 3468 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
5aef8938 3469 {
3470 location_t loc = tree_nonartificial_location (exp);
4d317237 3471 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3472
3473 if (tree_int_cst_lt (maxobjsize, range[0]))
3474 {
080a1363 3475 if (TREE_NO_WARNING (exp))
3476 return false;
3477
5aef8938 3478 /* Warn about crazy big sizes first since that's more
3479 likely to be meaningful than saying that the bound
3480 is greater than the object size if both are big. */
3481 if (range[0] == range[1])
3482 warning_at (loc, opt,
9098b938 3483 "%K%qD specified bound %E "
8d6c6ef5 3484 "exceeds maximum object size %E",
e6a18b5a 3485 exp, func,
8d6c6ef5 3486 range[0], maxobjsize);
5aef8938 3487 else
3488 warning_at (loc, opt,
9098b938 3489 "%K%qD specified bound between %E and %E "
8d6c6ef5 3490 "exceeds maximum object size %E",
e6a18b5a 3491 exp, func,
8d6c6ef5 3492 range[0], range[1], maxobjsize);
5aef8938 3493
3494 return false;
3495 }
3496
e6a18b5a 3497 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
5aef8938 3498 {
080a1363 3499 if (TREE_NO_WARNING (exp))
3500 return false;
3501
8d6c6ef5 3502 if (tree_int_cst_equal (range[0], range[1]))
5aef8938 3503 warning_at (loc, opt,
9098b938 3504 "%K%qD specified bound %E "
8d6c6ef5 3505 "exceeds destination size %E",
e6a18b5a 3506 exp, func,
3507 range[0], dstsize);
5aef8938 3508 else
3509 warning_at (loc, opt,
9098b938 3510 "%K%qD specified bound between %E and %E "
8d6c6ef5 3511 "exceeds destination size %E",
e6a18b5a 3512 exp, func,
3513 range[0], range[1], dstsize);
5aef8938 3514 return false;
3515 }
3516 }
3517 }
3518
e6a18b5a 3519 /* Check for reading past the end of SRC. */
8d6c6ef5 3520 if (slen
e6a18b5a 3521 && slen == srcstr
3522 && dstwrite && range[0]
8d6c6ef5 3523 && tree_int_cst_lt (slen, range[0]))
3524 {
080a1363 3525 if (TREE_NO_WARNING (exp))
3526 return false;
3527
8d6c6ef5 3528 location_t loc = tree_nonartificial_location (exp);
3529
3530 if (tree_int_cst_equal (range[0], range[1]))
625a4dfc 3531 warning_n (loc, opt, tree_to_uhwi (range[0]),
3532 "%K%qD reading %E byte from a region of size %E",
3533 "%K%qD reading %E bytes from a region of size %E",
e6a18b5a 3534 exp, func, range[0], slen);
8d6c6ef5 3535 else if (tree_int_cst_sign_bit (range[1]))
3536 {
3537 /* Avoid printing the upper bound if it's invalid. */
3538 warning_at (loc, opt,
9098b938 3539 "%K%qD reading %E or more bytes from a region "
8d6c6ef5 3540 "of size %E",
e6a18b5a 3541 exp, func, range[0], slen);
8d6c6ef5 3542 }
3543 else
3544 warning_at (loc, opt,
9098b938 3545 "%K%qD reading between %E and %E bytes from a region "
8d6c6ef5 3546 "of size %E",
e6a18b5a 3547 exp, func, range[0], range[1], slen);
8d6c6ef5 3548 return false;
3549 }
3550
5aef8938 3551 return true;
3552}
3553
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  /* First try the object-size machinery; it handles the common cases.  */
  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      /* Look through the defining statement to see through simple
	 pointer arithmetic that compute_builtin_object_size gives
	 up on (non-constant offsets in particular).  */
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      /* Constant offset: recurse on the base pointer and
		 subtract the offset from its size.  */
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      /* Variable offset: use its value range, if known, and
		 treat its lower bound like the constant offset above.  */
	      wide_int min, max;
	      enum value_range_type rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3667
3668/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3669 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3670 call expression, DEST is the destination argument, SRC is the source
3671 argument or null, and LEN is the number of bytes. Use Object Size type-0
3672 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3673 (no overflow or invalid sizes), false otherwise. */
3674
3675static bool
e6a18b5a 3676check_memop_access (tree exp, tree dest, tree src, tree size)
5aef8938 3677{
5aef8938 3678 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3679 try to determine the size of the largest source and destination
3680 object using type-0 Object Size regardless of the object size
3681 type specified by the option. */
3682 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3683 tree dstsize = compute_objsize (dest, 0);
5aef8938 3684
e6a18b5a 3685 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3686 srcsize, dstsize);
8d6c6ef5 3687}
3688
3689/* Validate memchr arguments without performing any expansion.
3690 Return NULL_RTX. */
3691
3692static rtx
3693expand_builtin_memchr (tree exp, rtx)
3694{
3695 if (!validate_arglist (exp,
3696 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3697 return NULL_RTX;
3698
3699 tree arg1 = CALL_EXPR_ARG (exp, 0);
3700 tree len = CALL_EXPR_ARG (exp, 2);
3701
3702 /* Diagnose calls where the specified length exceeds the size
3703 of the object. */
3704 if (warn_stringop_overflow)
3705 {
3706 tree size = compute_objsize (arg1, 0);
e6a18b5a 3707 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3708 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
8d6c6ef5 3709 }
3710
3711 return NULL_RTX;
5aef8938 3712}
3713
c2f47e15 3714/* Expand a call EXP to the memcpy builtin.
3715 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3716 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3717 mode MODE if that's convenient). */
c2f47e15 3718
53800dbe 3719static rtx
a65c4d64 3720expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3721{
c2f47e15 3722 if (!validate_arglist (exp,
3723 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724 return NULL_RTX;
5aef8938 3725
3726 tree dest = CALL_EXPR_ARG (exp, 0);
3727 tree src = CALL_EXPR_ARG (exp, 1);
3728 tree len = CALL_EXPR_ARG (exp, 2);
3729
e6a18b5a 3730 check_memop_access (exp, dest, src, len);
5aef8938 3731
d0fbba1a 3732 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3733 /*endp=*/ 0);
f21337ef 3734}
6840589f 3735
4d317237 3736/* Check a call EXP to the memmove built-in for validity.
3737 Return NULL_RTX on both success and failure. */
3738
3739static rtx
3740expand_builtin_memmove (tree exp, rtx)
3741{
3742 if (!validate_arglist (exp,
3743 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3744 return NULL_RTX;
3745
3746 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3747 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3748 tree len = CALL_EXPR_ARG (exp, 2);
3749
e6a18b5a 3750 check_memop_access (exp, dest, src, len);
4d317237 3751
3752 return NULL_RTX;
3753}
3754
c2f47e15 3755/* Expand a call EXP to the mempcpy builtin.
3756 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3757 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3758 mode MODE if that's convenient). If ENDP is 0 return the
3759 destination pointer, if ENDP is 1 return the end pointer ala
3760 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3761 stpcpy. */
647661c6 3762
3763static rtx
d0fbba1a 3764expand_builtin_mempcpy (tree exp, rtx target)
647661c6 3765{
c2f47e15 3766 if (!validate_arglist (exp,
3767 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3768 return NULL_RTX;
5aef8938 3769
3770 tree dest = CALL_EXPR_ARG (exp, 0);
3771 tree src = CALL_EXPR_ARG (exp, 1);
3772 tree len = CALL_EXPR_ARG (exp, 2);
3773
24e3b821 3774 /* Policy does not generally allow using compute_objsize (which
3775 is used internally by check_memop_size) to change code generation
3776 or drive optimization decisions.
3777
3778 In this instance it is safe because the code we generate has
3779 the same semantics regardless of the return value of
3780 check_memop_sizes. Exactly the same amount of data is copied
3781 and the return value is exactly the same in both cases.
3782
3783 Furthermore, check_memop_size always uses mode 0 for the call to
3784 compute_objsize, so the imprecise nature of compute_objsize is
3785 avoided. */
3786
5aef8938 3787 /* Avoid expanding mempcpy into memcpy when the call is determined
3788 to overflow the buffer. This also prevents the same overflow
3789 from being diagnosed again when expanding memcpy. */
e6a18b5a 3790 if (!check_memop_access (exp, dest, src, len))
5aef8938 3791 return NULL_RTX;
3792
3793 return expand_builtin_mempcpy_args (dest, src, len,
d0fbba1a 3794 target, exp, /*endp=*/ 1);
f21337ef 3795}
3796
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, int endp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may supply a better alignment/size estimate
     for the block operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, endp);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (endp == 1 && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);
  /* NOTE(review): pc_rtx here presumably signals that the move could
     not be expanded without the disallowed libcall return value —
     give up and let the caller emit a normal call.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* For mempcpy/stpcpy return the end of the copied region rather
     than its beginning.  */
  if (endp && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (endp == 2)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3891
/* Expand a mempcpy-style copy of LEN bytes from SRC to DEST for the
   call ORIG_EXP.  TARGET and ENDP are as for
   expand_builtin_memory_copy_args, to which this simply forwards.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  endp);
}
3899
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Only targets that provide a movstr pattern can expand this.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The result is unused, so return the destination pointer
	 itself rather than the insn's output operand.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3946
5aef8938 3947/* Do some very basic size validation of a call to the strcpy builtin
3948 given by EXP. Return NULL_RTX to have the built-in expand to a call
3949 to the library function. */
3950
3951static rtx
3952expand_builtin_strcat (tree exp, rtx)
3953{
3954 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3955 || !warn_stringop_overflow)
3956 return NULL_RTX;
3957
3958 tree dest = CALL_EXPR_ARG (exp, 0);
3959 tree src = CALL_EXPR_ARG (exp, 1);
3960
3961 /* There is no way here to determine the length of the string in
3962 the destination to which the SRC string is being appended so
3963 just diagnose cases when the souce string is longer than
3964 the destination object. */
3965
8d6c6ef5 3966 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3967
e6a18b5a 3968 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3969 destsize);
5aef8938 3970
3971 return NULL_RTX;
3972}
3973
48e1416a 3974/* Expand expression EXP, which is a call to the strcpy builtin. Return
3975 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3976 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3977 convenient). */
902de8ed 3978
53800dbe 3979static rtx
a65c4d64 3980expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3981{
5aef8938 3982 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3983 return NULL_RTX;
3984
3985 tree dest = CALL_EXPR_ARG (exp, 0);
3986 tree src = CALL_EXPR_ARG (exp, 1);
3987
3988 if (warn_stringop_overflow)
3989 {
8d6c6ef5 3990 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e6a18b5a 3991 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3992 src, destsize);
5aef8938 3993 }
3994
a788aa5f 3995 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
0b39ade8 3996 {
3997 /* Check to see if the argument was declared attribute nonstring
3998 and if so, issue a warning since at this point it's not known
3999 to be nul-terminated. */
4000 tree fndecl = get_callee_fndecl (exp);
4001 maybe_warn_nonstring_arg (fndecl, exp);
4002 return ret;
4003 }
4004
4005 return NULL_RTX;
c2f47e15 4006}
4007
4008/* Helper function to do the actual work for expand_builtin_strcpy. The
4009 arguments to the builtin_strcpy call DEST and SRC are broken out
4010 so that this can also be called without constructing an actual CALL_EXPR.
4011 The other arguments and return value are the same as for
4012 expand_builtin_strcpy. */
4013
4014static rtx
a788aa5f 4015expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
c2f47e15 4016{
a788aa5f 4017 /* Detect strcpy calls with unterminated arrays.. */
4018 if (tree nonstr = unterminated_array (src))
4019 {
4020 /* NONSTR refers to the non-nul terminated constant array. */
4021 if (!TREE_NO_WARNING (exp))
4022 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4023 return NULL_RTX;
4024 }
4025
c2f47e15 4026 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 4027}
4028
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose writes past the end of the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data data;
      memset (&data, 0, sizeof (c_strlen_data));
      if (!c_getstr (src, NULL)
	  || !(len = c_strlen (src, 0, &data, 1)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* DATA.DECL is set when the source is a constant array that is
	 not nul-terminated; warn about reading past its end.  */
      if (data.decl && !TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, data.decl);

      /* Copy the string including its terminating nul, then compute
	 the return value from the known length.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy-style expansion and add the
		 constant length to its result to get stpcpy's
		 end-of-string return value.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
4117
df6e8b42 4118/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4119 arguments while being careful to avoid duplicate warnings (which could
4120 be issued if the expander were to expand the call, resulting in it
4121 being emitted in expand_call(). */
4122
4123static rtx
4124expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4125{
4126 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4127 {
4128 /* The call has been successfully expanded. Check for nonstring
4129 arguments and issue warnings as appropriate. */
4130 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4131 return ret;
4132 }
4133
4134 return NULL_RTX;
4135}
4136
4d317237 4137/* Check a call EXP to the stpncpy built-in for validity.
4138 Return NULL_RTX on both success and failure. */
4139
4140static rtx
4141expand_builtin_stpncpy (tree exp, rtx)
4142{
4143 if (!validate_arglist (exp,
4144 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4145 || !warn_stringop_overflow)
4146 return NULL_RTX;
4147
aca1a787 4148 /* The source and destination of the call. */
4d317237 4149 tree dest = CALL_EXPR_ARG (exp, 0);
4150 tree src = CALL_EXPR_ARG (exp, 1);
4151
aca1a787 4152 /* The exact number of bytes to write (not the maximum). */
4d317237 4153 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 4154
aca1a787 4155 /* The size of the destination object. */
8d6c6ef5 4156 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 4157
e6a18b5a 4158 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4d317237 4159
4160 return NULL_RTX;
4161}
4162
6840589f 4163/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4164 bytes from constant string DATA + OFFSET and return it as target
4165 constant. */
4166
09879952 4167rtx
aecda0d6 4168builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 4169 scalar_int_mode mode)
6840589f 4170{
4171 const char *str = (const char *) data;
4172
4173 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4174 return const0_rtx;
4175
4176 return c_readstr (str + offset, mode);
4177}
4178
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  EXP is the call
   expression and OBJSIZE the destination size determined by
   __strncat_chk, or null.  Returns true on success (no overflow or
   invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or when the bound is smaller
     than the source, the bound determines the maximum write.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN but check_access will also
     check MAXREAD if SRCLEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4239
4240/* Similar to expand_builtin_strcat, do some very basic size validation
4241 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
4242 the built-in expand to a call to the library function. */
4243
4244static rtx
4245expand_builtin_strncat (tree exp, rtx)
4246{
4247 if (!validate_arglist (exp,
4248 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4249 || !warn_stringop_overflow)
4250 return NULL_RTX;
4251
4252 tree dest = CALL_EXPR_ARG (exp, 0);
4253 tree src = CALL_EXPR_ARG (exp, 1);
4254 /* The upper bound on the number of bytes to write. */
e6a18b5a 4255 tree maxread = CALL_EXPR_ARG (exp, 2);
5aef8938 4256 /* The length of the source sequence. */
4257 tree slen = c_strlen (src, 1);
4258
4259 /* Try to determine the range of lengths that the source expression
4260 refers to. */
4261 tree lenrange[2];
4262 if (slen)
4263 lenrange[0] = lenrange[1] = slen;
4264 else
4265 get_range_strlen (src, lenrange);
4266
4267 /* Try to verify that the destination is big enough for the shortest
4268 string. First try to determine the size of the destination object
4269 into which the source is being copied. */
8d6c6ef5 4270 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 4271
4272 /* Add one for the terminating nul. */
4273 tree srclen = (lenrange[0]
4274 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4275 size_one_node)
4276 : NULL_TREE);
4277
e6a18b5a 4278 /* The strncat function copies at most MAXREAD bytes and always appends
4279 the terminating nul so the specified upper bound should never be equal
4280 to (or greater than) the size of the destination. */
4281 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4282 && tree_int_cst_equal (destsize, maxread))
5aef8938 4283 {
4d317237 4284 location_t loc = tree_nonartificial_location (exp);
4285 loc = expansion_point_location_if_in_system_header (loc);
4286
4287 warning_at (loc, OPT_Wstringop_overflow_,
9098b938 4288 "%K%qD specified bound %E equals destination size",
e6a18b5a 4289 exp, get_callee_fndecl (exp), maxread);
5aef8938 4290
4291 return NULL_RTX;
4292 }
4293
4294 if (!srclen
e6a18b5a 4295 || (maxread && tree_fits_uhwi_p (maxread)
5aef8938 4296 && tree_fits_uhwi_p (srclen)
e6a18b5a 4297 && tree_int_cst_lt (maxread, srclen)))
4298 srclen = maxread;
5aef8938 4299
e6a18b5a 4300 /* The number of bytes to write is SRCLEN. */
4301 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
5aef8938 4302
4303 return NULL_RTX;
4304}
4305
48e1416a 4306/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 4307 NULL_RTX if we failed the caller should emit a normal call. */
ed09096d 4308
4309static rtx
a65c4d64 4310expand_builtin_strncpy (tree exp, rtx target)
ed09096d 4311{
389dd41b 4312 location_t loc = EXPR_LOCATION (exp);
c2f47e15 4313
4314 if (validate_arglist (exp,
4315 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 4316 {
c2f47e15 4317 tree dest = CALL_EXPR_ARG (exp, 0);
4318 tree src = CALL_EXPR_ARG (exp, 1);
5aef8938 4319 /* The number of bytes to write (not the maximum). */
c2f47e15 4320 tree len = CALL_EXPR_ARG (exp, 2);
5aef8938 4321 /* The length of the source sequence. */
c2f47e15 4322 tree slen = c_strlen (src, 1);
6840589f 4323
e6a18b5a 4324 if (warn_stringop_overflow)
4325 {
4326 tree destsize = compute_objsize (dest,
4327 warn_stringop_overflow - 1);
4328
4329 /* The number of bytes to write is LEN but check_access will also
4330 check SLEN if LEN's value isn't known. */
4331 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4332 destsize);
4333 }
5aef8938 4334
8ff6a5cd 4335 /* We must be passed a constant len and src parameter. */
e913b5cd 4336 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 4337 return NULL_RTX;
ed09096d 4338
389dd41b 4339 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 4340
4341 /* We're required to pad with trailing zeros if the requested
a0c938f0 4342 len is greater than strlen(s2)+1. In that case try to
6840589f 4343 use store_by_pieces, if it fails, punt. */
ed09096d 4344 if (tree_int_cst_lt (slen, len))
6840589f 4345 {
957d0361 4346 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 4347 const char *p = c_getstr (src);
6840589f 4348 rtx dest_mem;
4349
e913b5cd 4350 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4351 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 4352 builtin_strncpy_read_str,
364c0c59 4353 CONST_CAST (char *, p),
4354 dest_align, false))
c2f47e15 4355 return NULL_RTX;
6840589f 4356
d8ae1baa 4357 dest_mem = get_memory_rtx (dest, len);
e913b5cd 4358 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 4359 builtin_strncpy_read_str,
364c0c59 4360 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 4361 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 4362 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 4363 return dest_mem;
6840589f 4364 }
ed09096d 4365 }
c2f47e15 4366 return NULL_RTX;
ed09096d 4367}
4368
ecc318ff 4369/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4370 bytes from constant string DATA + OFFSET and return it as target
4371 constant. */
4372
f656b751 4373rtx
aecda0d6 4374builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4375 scalar_int_mode mode)
ecc318ff 4376{
4377 const char *c = (const char *) data;
364c0c59 4378 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 4379
4380 memset (p, *c, GET_MODE_SIZE (mode));
4381
4382 return c_readstr (p, mode);
4383}
4384
a7ec6974 4385/* Callback routine for store_by_pieces. Return the RTL of a register
4386 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4387 char value given in the RTL register data. For example, if mode is
4388 4 bytes wide, return the RTL for 0x01010101*data. */
4389
4390static rtx
aecda0d6 4391builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4392 scalar_int_mode mode)
a7ec6974 4393{
4394 rtx target, coeff;
4395 size_t size;
4396 char *p;
4397
4398 size = GET_MODE_SIZE (mode);
f0ce3b1f 4399 if (size == 1)
4400 return (rtx) data;
a7ec6974 4401
364c0c59 4402 p = XALLOCAVEC (char, size);
a7ec6974 4403 memset (p, 1, size);
4404 coeff = c_readstr (p, mode);
4405
f0ce3b1f 4406 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 4407 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4408 return force_reg (mode, target);
4409}
4410
48e1416a 4411/* Expand expression EXP, which is a call to the memset builtin. Return
4412 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 4413 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 4414 convenient). */
902de8ed 4415
53800dbe 4416static rtx
3754d046 4417expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 4418{
c2f47e15 4419 if (!validate_arglist (exp,
4420 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4421 return NULL_RTX;
5aef8938 4422
4423 tree dest = CALL_EXPR_ARG (exp, 0);
4424 tree val = CALL_EXPR_ARG (exp, 1);
4425 tree len = CALL_EXPR_ARG (exp, 2);
4426
e6a18b5a 4427 check_memop_access (exp, dest, NULL_TREE, len);
5aef8938 4428
4429 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 4430}
53800dbe 4431
c2f47e15 4432/* Helper function to do the actual work for expand_builtin_memset. The
4433 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4434 so that this can also be called without constructing an actual CALL_EXPR.
4435 The other arguments and return value are the same as for
4436 expand_builtin_memset. */
6b961939 4437
c2f47e15 4438static rtx
4439expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 4440 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 4441{
4442 tree fndecl, fn;
4443 enum built_in_function fcode;
3754d046 4444 machine_mode val_mode;
c2f47e15 4445 char c;
4446 unsigned int dest_align;
4447 rtx dest_mem, dest_addr, len_rtx;
4448 HOST_WIDE_INT expected_size = -1;
4449 unsigned int expected_align = 0;
36d63243 4450 unsigned HOST_WIDE_INT min_size;
4451 unsigned HOST_WIDE_INT max_size;
9db0f34d 4452 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 4453
957d0361 4454 dest_align = get_pointer_alignment (dest);
162719b3 4455
c2f47e15 4456 /* If DEST is not a pointer type, don't do this operation in-line. */
4457 if (dest_align == 0)
4458 return NULL_RTX;
6f428e8b 4459
8cee8dc0 4460 if (currently_expanding_gimple_stmt)
4461 stringop_block_profile (currently_expanding_gimple_stmt,
4462 &expected_align, &expected_size);
75a70cf9 4463
c2f47e15 4464 if (expected_align < dest_align)
4465 expected_align = dest_align;
6b961939 4466
c2f47e15 4467 /* If the LEN parameter is zero, return DEST. */
4468 if (integer_zerop (len))
4469 {
4470 /* Evaluate and ignore VAL in case it has side-effects. */
4471 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4472 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4473 }
7a3e5564 4474
c2f47e15 4475 /* Stabilize the arguments in case we fail. */
4476 dest = builtin_save_expr (dest);
4477 val = builtin_save_expr (val);
4478 len = builtin_save_expr (len);
a7ec6974 4479
c2f47e15 4480 len_rtx = expand_normal (len);
9db0f34d 4481 determine_block_size (len, len_rtx, &min_size, &max_size,
4482 &probable_max_size);
c2f47e15 4483 dest_mem = get_memory_rtx (dest, len);
03a5dda9 4484 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 4485
c2f47e15 4486 if (TREE_CODE (val) != INTEGER_CST)
4487 {
4488 rtx val_rtx;
a7ec6974 4489
c2f47e15 4490 val_rtx = expand_normal (val);
03a5dda9 4491 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 4492
c2f47e15 4493 /* Assume that we can memset by pieces if we can store
4494 * the coefficients by pieces (in the required modes).
4495 * We can't pass builtin_memset_gen_str as that emits RTL. */
4496 c = 1;
e913b5cd 4497 if (tree_fits_uhwi_p (len)
4498 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4499 builtin_memset_read_str, &c, dest_align,
4500 true))
c2f47e15 4501 {
03a5dda9 4502 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 4503 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4504 builtin_memset_gen_str, val_rtx, dest_align,
4505 true, 0);
c2f47e15 4506 }
4507 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4508 dest_align, expected_align,
9db0f34d 4509 expected_size, min_size, max_size,
4510 probable_max_size))
6b961939 4511 goto do_libcall;
48e1416a 4512
c2f47e15 4513 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4514 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4515 return dest_mem;
4516 }
53800dbe 4517
c2f47e15 4518 if (target_char_cast (val, &c))
4519 goto do_libcall;
ecc318ff 4520
c2f47e15 4521 if (c)
4522 {
e913b5cd 4523 if (tree_fits_uhwi_p (len)
4524 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4525 builtin_memset_read_str, &c, dest_align,
4526 true))
e913b5cd 4527 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4528 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 4529 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4530 gen_int_mode (c, val_mode),
c2f47e15 4531 dest_align, expected_align,
9db0f34d 4532 expected_size, min_size, max_size,
4533 probable_max_size))
c2f47e15 4534 goto do_libcall;
48e1416a 4535
c2f47e15 4536 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4538 return dest_mem;
4539 }
ecc318ff 4540
c2f47e15 4541 set_mem_align (dest_mem, dest_align);
4542 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4543 CALL_EXPR_TAILCALL (orig_exp)
4544 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 4545 expected_align, expected_size,
9db0f34d 4546 min_size, max_size,
4547 probable_max_size);
53800dbe 4548
c2f47e15 4549 if (dest_addr == 0)
4550 {
4551 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4552 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4553 }
53800dbe 4554
c2f47e15 4555 return dest_addr;
6b961939 4556
c2f47e15 4557 do_libcall:
4558 fndecl = get_callee_fndecl (orig_exp);
4559 fcode = DECL_FUNCTION_CODE (fndecl);
1e42d5c6 4560 if (fcode == BUILT_IN_MEMSET)
0568e9c1 4561 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4562 dest, val, len);
c2f47e15 4563 else if (fcode == BUILT_IN_BZERO)
0568e9c1 4564 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4565 dest, len);
c2f47e15 4566 else
4567 gcc_unreachable ();
a65c4d64 4568 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4569 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 4570 return expand_call (fn, target, target == const0_rtx);
53800dbe 4571}
4572
48e1416a 4573/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 4574 NULL_RTX if we failed the caller should emit a normal call. */
27d0c333 4575
ffc83088 4576static rtx
0b25db21 4577expand_builtin_bzero (tree exp)
ffc83088 4578{
c2f47e15 4579 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 4580 return NULL_RTX;
ffc83088 4581
5aef8938 4582 tree dest = CALL_EXPR_ARG (exp, 0);
4583 tree size = CALL_EXPR_ARG (exp, 1);
4584
e6a18b5a 4585 check_memop_access (exp, dest, NULL_TREE, size);
bf8e3599 4586
7369e7ba 4587 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 4588 memset(ptr x, int 0, size_t y). This is done this way
4589 so that if it isn't expanded inline, we fallback to
4590 calling bzero instead of memset. */
bf8e3599 4591
5aef8938 4592 location_t loc = EXPR_LOCATION (exp);
4593
c2f47e15 4594 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 4595 fold_convert_loc (loc,
4596 size_type_node, size),
c2f47e15 4597 const0_rtx, VOIDmode, exp);
ffc83088 4598}
4599
d6f01a40 4600/* Try to expand cmpstr operation ICODE with the given operands.
4601 Return the result rtx on success, otherwise return null. */
4602
4603static rtx
4604expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4605 HOST_WIDE_INT align)
4606{
4607 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4608
4609 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4610 target = NULL_RTX;
4611
4612 struct expand_operand ops[4];
4613 create_output_operand (&ops[0], target, insn_mode);
4614 create_fixed_operand (&ops[1], arg1_rtx);
4615 create_fixed_operand (&ops[2], arg2_rtx);
4616 create_integer_operand (&ops[3], align);
4617 if (maybe_expand_insn (icode, 4, ops))
4618 return ops[0].value;
4619 return NULL_RTX;
4620}
4621
7a3f89b5 4622/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 4623 Return NULL_RTX if we failed and the caller should emit a normal call,
3e346f54 4624 otherwise try to get the result in TARGET, if convenient.
4625 RESULT_EQ is true if we can relax the returned value to be either zero
4626 or nonzero, without caring about the sign. */
27d0c333 4627
53800dbe 4628static rtx
3e346f54 4629expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
53800dbe 4630{
c2f47e15 4631 if (!validate_arglist (exp,
4632 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4633 return NULL_RTX;
6f428e8b 4634
ea368aac 4635 tree arg1 = CALL_EXPR_ARG (exp, 0);
4636 tree arg2 = CALL_EXPR_ARG (exp, 1);
4637 tree len = CALL_EXPR_ARG (exp, 2);
a950155e 4638 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4639 bool no_overflow = true;
8d6c6ef5 4640
4641 /* Diagnose calls where the specified length exceeds the size of either
4642 object. */
a950155e 4643 tree size = compute_objsize (arg1, 0);
4644 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4645 len, /*maxread=*/NULL_TREE, size,
4646 /*objsize=*/NULL_TREE);
b3e6ae76 4647 if (no_overflow)
a950155e 4648 {
4649 size = compute_objsize (arg2, 0);
4650 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4651 len, /*maxread=*/NULL_TREE, size,
4652 /*objsize=*/NULL_TREE);
b3e6ae76 4653 }
a950155e 4654
ee1b788e 4655 /* If the specified length exceeds the size of either object,
4656 call the function. */
4657 if (!no_overflow)
4658 return NULL_RTX;
4659
b3e6ae76 4660 /* Due to the performance benefit, always inline the calls first
a950155e 4661 when result_eq is false. */
4662 rtx result = NULL_RTX;
b3e6ae76 4663
ee1b788e 4664 if (!result_eq && fcode != BUILT_IN_BCMP)
8d6c6ef5 4665 {
0dbefa15 4666 result = inline_expand_builtin_string_cmp (exp, target);
a950155e 4667 if (result)
4668 return result;
8d6c6ef5 4669 }
4670
3e346f54 4671 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4672 location_t loc = EXPR_LOCATION (exp);
b428c0a5 4673
ea368aac 4674 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4675 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
53800dbe 4676
ea368aac 4677 /* If we don't have POINTER_TYPE, call the function. */
4678 if (arg1_align == 0 || arg2_align == 0)
4679 return NULL_RTX;
53800dbe 4680
ea368aac 4681 rtx arg1_rtx = get_memory_rtx (arg1, len);
4682 rtx arg2_rtx = get_memory_rtx (arg2, len);
3e346f54 4683 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
53800dbe 4684
ea368aac 4685 /* Set MEM_SIZE as appropriate. */
3e346f54 4686 if (CONST_INT_P (len_rtx))
ea368aac 4687 {
3e346f54 4688 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4689 set_mem_size (arg2_rtx, INTVAL (len_rtx));
ea368aac 4690 }
83f88f8e 4691
3e346f54 4692 by_pieces_constfn constfn = NULL;
4693
719f3058 4694 const char *src_str = c_getstr (arg2);
4695 if (result_eq && src_str == NULL)
4696 {
4697 src_str = c_getstr (arg1);
4698 if (src_str != NULL)
092db747 4699 std::swap (arg1_rtx, arg2_rtx);
719f3058 4700 }
3e346f54 4701
4702 /* If SRC is a string constant and block move would be done
4703 by pieces, we can avoid loading the string from memory
4704 and only stored the computed constants. */
4705 if (src_str
4706 && CONST_INT_P (len_rtx)
4707 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4708 constfn = builtin_memcpy_read_str;
4709
a950155e 4710 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4711 TREE_TYPE (len), target,
4712 result_eq, constfn,
4713 CONST_CAST (char *, src_str));
3e346f54 4714
ea368aac 4715 if (result)
4716 {
4717 /* Return the value in the proper mode for this function. */
4718 if (GET_MODE (result) == mode)
4719 return result;
83f88f8e 4720
ea368aac 4721 if (target != 0)
4722 {
4723 convert_move (target, result, 0);
4724 return target;
4725 }
0cd832f0 4726
53800dbe 4727 return convert_to_mode (mode, result, 0);
ea368aac 4728 }
53800dbe 4729
61ffc71a 4730 return NULL_RTX;
6f428e8b 4731}
4732
c2f47e15 4733/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4734 if we failed the caller should emit a normal call, otherwise try to get
4735 the result in TARGET, if convenient. */
902de8ed 4736
53800dbe 4737static rtx
a65c4d64 4738expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4739{
c2f47e15 4740 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4741 return NULL_RTX;
bf8e3599 4742
a950155e 4743 /* Due to the performance benefit, always inline the calls first. */
4744 rtx result = NULL_RTX;
0dbefa15 4745 result = inline_expand_builtin_string_cmp (exp, target);
a950155e 4746 if (result)
4747 return result;
4748
d6f01a40 4749 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4750 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5c5d012b 4751 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4752 return NULL_RTX;
a0c938f0 4753
5c5d012b 4754 tree arg1 = CALL_EXPR_ARG (exp, 0);
4755 tree arg2 = CALL_EXPR_ARG (exp, 1);
6ac5504b 4756
5c5d012b 4757 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4758 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7a3f89b5 4759
5c5d012b 4760 /* If we don't have POINTER_TYPE, call the function. */
4761 if (arg1_align == 0 || arg2_align == 0)
4762 return NULL_RTX;
7a3f89b5 4763
5c5d012b 4764 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4765 arg1 = builtin_save_expr (arg1);
4766 arg2 = builtin_save_expr (arg2);
53800dbe 4767
5c5d012b 4768 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4769 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
d6f01a40 4770
5c5d012b 4771 /* Try to call cmpstrsi. */
4772 if (cmpstr_icode != CODE_FOR_nothing)
4773 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4774 MIN (arg1_align, arg2_align));
6ac5504b 4775
5c5d012b 4776 /* Try to determine at least one length and call cmpstrnsi. */
4777 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4778 {
4779 tree len;
4780 rtx arg3_rtx;
4781
4782 tree len1 = c_strlen (arg1, 1);
4783 tree len2 = c_strlen (arg2, 1);
4784
4785 if (len1)
4786 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4787 if (len2)
4788 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4789
4790 /* If we don't have a constant length for the first, use the length
4791 of the second, if we know it. We don't require a constant for
4792 this case; some cost analysis could be done if both are available
4793 but neither is constant. For now, assume they're equally cheap,
4794 unless one has side effects. If both strings have constant lengths,
4795 use the smaller. */
4796
4797 if (!len1)
4798 len = len2;
4799 else if (!len2)
4800 len = len1;
4801 else if (TREE_SIDE_EFFECTS (len1))
4802 len = len2;
4803 else if (TREE_SIDE_EFFECTS (len2))
4804 len = len1;
4805 else if (TREE_CODE (len1) != INTEGER_CST)
4806 len = len2;
4807 else if (TREE_CODE (len2) != INTEGER_CST)
4808 len = len1;
4809 else if (tree_int_cst_lt (len1, len2))
4810 len = len1;
4811 else
4812 len = len2;
3f8aefe2 4813
5c5d012b 4814 /* If both arguments have side effects, we cannot optimize. */
4815 if (len && !TREE_SIDE_EFFECTS (len))
6ac5504b 4816 {
5c5d012b 4817 arg3_rtx = expand_normal (len);
4818 result = expand_cmpstrn_or_cmpmem
4819 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4820 arg3_rtx, MIN (arg1_align, arg2_align));
6ac5504b 4821 }
5c5d012b 4822 }
4823
5c5d012b 4824 tree fndecl = get_callee_fndecl (exp);
5c5d012b 4825 if (result)
4826 {
0b39ade8 4827 /* Check to see if the argument was declared attribute nonstring
4828 and if so, issue a warning since at this point it's not known
4829 to be nul-terminated. */
4830 maybe_warn_nonstring_arg (fndecl, exp);
4831
5c5d012b 4832 /* Return the value in the proper mode for this function. */
4833 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4834 if (GET_MODE (result) == mode)
4835 return result;
4836 if (target == 0)
4837 return convert_to_mode (mode, result, 0);
4838 convert_move (target, result, 0);
4839 return target;
6ac5504b 4840 }
5c5d012b 4841
4842 /* Expand the library call ourselves using a stabilized argument
4843 list to avoid re-evaluating the function's arguments twice. */
4844 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4845 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4846 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4847 return expand_call (fn, target, target == const0_rtx);
83d79705 4848}
53800dbe 4849
48e1416a 4850/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4851 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
ed09096d 4852 the result in TARGET, if convenient. */
27d0c333 4853
ed09096d 4854static rtx
a65c4d64 4855expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4856 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4857{
c2f47e15 4858 if (!validate_arglist (exp,
4859 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4860 return NULL_RTX;
ed09096d 4861
a950155e 4862 /* Due to the performance benefit, always inline the calls first. */
4863 rtx result = NULL_RTX;
0dbefa15 4864 result = inline_expand_builtin_string_cmp (exp, target);
a950155e 4865 if (result)
4866 return result;
4867
6e34e617 4868 /* If c_strlen can determine an expression for one of the string
6ac5504b 4869 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4870 using length MIN(strlen(string)+1, arg3). */
d6f01a40 4871 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5c5d012b 4872 if (cmpstrn_icode == CODE_FOR_nothing)
4873 return NULL_RTX;
27d0c333 4874
5c5d012b 4875 tree len;
4876
4877 tree arg1 = CALL_EXPR_ARG (exp, 0);
4878 tree arg2 = CALL_EXPR_ARG (exp, 1);
4879 tree arg3 = CALL_EXPR_ARG (exp, 2);
4880
4881 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4882 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4883
4884 tree len1 = c_strlen (arg1, 1);
4885 tree len2 = c_strlen (arg2, 1);
4886
4887 location_t loc = EXPR_LOCATION (exp);
4888
4889 if (len1)
4890 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4891 if (len2)
4892 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4893
4894 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4895
4896 /* If we don't have a constant length for the first, use the length
4897 of the second, if we know it. If neither string is constant length,
4898 use the given length argument. We don't require a constant for
4899 this case; some cost analysis could be done if both are available
4900 but neither is constant. For now, assume they're equally cheap,
4901 unless one has side effects. If both strings have constant lengths,
4902 use the smaller. */
4903
4904 if (!len1 && !len2)
4905 len = len3;
4906 else if (!len1)
4907 len = len2;
4908 else if (!len2)
4909 len = len1;
4910 else if (TREE_SIDE_EFFECTS (len1))
4911 len = len2;
4912 else if (TREE_SIDE_EFFECTS (len2))
4913 len = len1;
4914 else if (TREE_CODE (len1) != INTEGER_CST)
4915 len = len2;
4916 else if (TREE_CODE (len2) != INTEGER_CST)
4917 len = len1;
4918 else if (tree_int_cst_lt (len1, len2))
4919 len = len1;
4920 else
4921 len = len2;
4922
4923 /* If we are not using the given length, we must incorporate it here.
4924 The actual new length parameter will be MIN(len,arg3) in this case. */
4925 if (len != len3)
a55f0871 4926 {
4927 len = fold_convert_loc (loc, sizetype, len);
4928 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4929 }
5c5d012b 4930 rtx arg1_rtx = get_memory_rtx (arg1, len);
4931 rtx arg2_rtx = get_memory_rtx (arg2, len);
4932 rtx arg3_rtx = expand_normal (len);
a950155e 4933 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4934 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4935 MIN (arg1_align, arg2_align));
5c5d012b 4936
5c5d012b 4937 tree fndecl = get_callee_fndecl (exp);
5c5d012b 4938 if (result)
4939 {
0b39ade8 4940 /* Check to see if the argument was declared attribute nonstring
4941 and if so, issue a warning since at this point it's not known
4942 to be nul-terminated. */
4943 maybe_warn_nonstring_arg (fndecl, exp);
4944
5c5d012b 4945 /* Return the value in the proper mode for this function. */
4946 mode = TYPE_MODE (TREE_TYPE (exp));
4947 if (GET_MODE (result) == mode)
4948 return result;
4949 if (target == 0)
4950 return convert_to_mode (mode, result, 0);
4951 convert_move (target, result, 0);
4952 return target;
4953 }
4954
4955 /* Expand the library call ourselves using a stabilized argument
4956 list to avoid re-evaluating the function's arguments twice. */
4957 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4958 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4959 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4960 return expand_call (fn, target, target == const0_rtx);
49f0327b 4961}
4962
a66c9326 4963/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4964 if that's convenient. */
902de8ed 4965
a66c9326 4966rtx
aecda0d6 4967expand_builtin_saveregs (void)
53800dbe 4968{
1e0c0b35 4969 rtx val;
4970 rtx_insn *seq;
53800dbe 4971
4972 /* Don't do __builtin_saveregs more than once in a function.
4973 Save the result of the first call and reuse it. */
4974 if (saveregs_value != 0)
4975 return saveregs_value;
53800dbe 4976
a66c9326 4977 /* When this function is called, it means that registers must be
4978 saved on entry to this function. So we migrate the call to the
4979 first insn of this function. */
4980
4981 start_sequence ();
53800dbe 4982
a66c9326 4983 /* Do whatever the machine needs done in this case. */
45550790 4984 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4985
a66c9326 4986 seq = get_insns ();
4987 end_sequence ();
53800dbe 4988
a66c9326 4989 saveregs_value = val;
53800dbe 4990
31d3e01c 4991 /* Put the insns after the NOTE that starts the function. If this
4992 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4993 the code is placed at the start of the function. */
4994 push_topmost_sequence ();
0ec80471 4995 emit_insn_after (seq, entry_of_function ());
a66c9326 4996 pop_topmost_sequence ();
4997
4998 return val;
53800dbe 4999}
5000
79012a9d 5001/* Expand a call to __builtin_next_arg. */
27d0c333 5002
53800dbe 5003static rtx
79012a9d 5004expand_builtin_next_arg (void)
53800dbe 5005{
79012a9d 5006 /* Checking arguments is already done in fold_builtin_next_arg
5007 that must be called before this function. */
940ddc5c 5008 return expand_binop (ptr_mode, add_optab,
abe32cce 5009 crtl->args.internal_arg_pointer,
5010 crtl->args.arg_offset_rtx,
53800dbe 5011 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5012}
5013
a66c9326 5014/* Make it easier for the backends by protecting the valist argument
5015 from multiple evaluations. */
5016
5017static tree
389dd41b 5018stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 5019{
5f57a8b1 5020 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5021
182cf5a9 5022 /* The current way of determining the type of valist is completely
5023 bogus. We should have the information on the va builtin instead. */
5024 if (!vatype)
5025 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 5026
5027 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 5028 {
2d47cc32 5029 if (TREE_SIDE_EFFECTS (valist))
5030 valist = save_expr (valist);
11a61dea 5031
2d47cc32 5032 /* For this case, the backends will be expecting a pointer to
5f57a8b1 5033 vatype, but it's possible we've actually been given an array
5034 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 5035 So fix it. */
5036 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 5037 {
5f57a8b1 5038 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 5039 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 5040 }
a66c9326 5041 }
11a61dea 5042 else
a66c9326 5043 {
182cf5a9 5044 tree pt = build_pointer_type (vatype);
11a61dea 5045
2d47cc32 5046 if (! needs_lvalue)
5047 {
11a61dea 5048 if (! TREE_SIDE_EFFECTS (valist))
5049 return valist;
bf8e3599 5050
389dd41b 5051 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 5052 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 5053 }
2d47cc32 5054
11a61dea 5055 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 5056 valist = save_expr (valist);
182cf5a9 5057 valist = fold_build2_loc (loc, MEM_REF,
5058 vatype, valist, build_int_cst (pt, 0));
a66c9326 5059 }
5060
5061 return valist;
5062}
5063
2e15d750 5064/* The "standard" definition of va_list is void*. */
5065
5066tree
5067std_build_builtin_va_list (void)
5068{
5069 return ptr_type_node;
5070}
5071
5f57a8b1 5072/* The "standard" abi va_list is va_list_type_node. */
5073
5074tree
5075std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5076{
5077 return va_list_type_node;
5078}
5079
5080/* The "standard" type of va_list is va_list_type_node. */
5081
5082tree
5083std_canonical_va_list_type (tree type)
5084{
5085 tree wtype, htype;
5086
5f57a8b1 5087 wtype = va_list_type_node;
5088 htype = type;
b6da2e41 5089
5090 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 5091 {
5092 /* If va_list is an array type, the argument may have decayed
5093 to a pointer type, e.g. by being passed to another function.
5094 In that case, unwrap both types so that we can compare the
5095 underlying records. */
5096 if (TREE_CODE (htype) == ARRAY_TYPE
5097 || POINTER_TYPE_P (htype))
5098 {
5099 wtype = TREE_TYPE (wtype);
5100 htype = TREE_TYPE (htype);
5101 }
5102 }
5103 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5104 return va_list_type_node;
5105
5106 return NULL_TREE;
5107}
5108
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is an rtx for
   the address of the first anonymous stack argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a store destination and move NEXTARG into it
     (third argument 0: treat the value as signed for the move).  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
5118
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   the expansion is performed for its side effects.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  /* va_start takes the va_list plus the last named parameter; reject
     calls with fewer than two operands.  */
  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; if it did,
     expand to nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  /* Force the va_list argument into a storable form (needs_lvalue).  */
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target's va_start expander; fall back to the standard
     void* implementation.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
5147
c2f47e15 5148/* Expand EXP, a call to __builtin_va_end. */
f7c44134 5149
a66c9326 5150static rtx
c2f47e15 5151expand_builtin_va_end (tree exp)
a66c9326 5152{
c2f47e15 5153 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 5154
8a15c04a 5155 /* Evaluate for side effects, if needed. I hate macros that don't
5156 do that. */
5157 if (TREE_SIDE_EFFECTS (valist))
5158 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 5159
5160 return const0_rtx;
5161}
5162
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be usable as an lvalue (third argument 1);
     the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment copies it.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying bytes with a block
	 move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5214
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL identifies which; EXP is the
   CALL_EXPR.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, make sure the value is in a form
	 usable as an address (register or constant).  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5265
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  /* The three variants take 1 (size), 2 (size, align) or
     3 (size, align, max) integer arguments.  */
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit
	  ))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size; -1 means "no explicit maximum".  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
5330
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the dynamic stack offset in Pmode, convert it to ptr_mode,
     and add it to the bottom address.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  /* Emit the library call __asan_allocas_unpoison (top, bot).  */
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5354
74bdbe96 5355/* Expand a call to bswap builtin in EXP.
5356 Return NULL_RTX if a normal call should be emitted rather than expanding the
5357 function in-line. If convenient, the result should be placed in TARGET.
5358 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 5359
5360static rtx
3754d046 5361expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 5362 rtx subtarget)
42791117 5363{
42791117 5364 tree arg;
5365 rtx op0;
5366
c2f47e15 5367 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5368 return NULL_RTX;
42791117 5369
c2f47e15 5370 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 5371 op0 = expand_expr (arg,
5372 subtarget && GET_MODE (subtarget) == target_mode
5373 ? subtarget : NULL_RTX,
5374 target_mode, EXPAND_NORMAL);
5375 if (GET_MODE (op0) != target_mode)
5376 op0 = convert_to_mode (target_mode, op0, 1);
42791117 5377
74bdbe96 5378 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 5379
5380 gcc_assert (target);
5381
74bdbe96 5382 return convert_to_mode (target_mode, target, 1);
42791117 5383}
5384
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument, reusing SUBTARGET only if its mode matches
     the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The last argument is
     the unsignedp flag: 1 for all ops except clrsb, which is expanded
     as a signed operation.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
89cfe6e5 5413
48e1416a 5414/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 5415 as the builtin_expect semantic should've been already executed by
5416 tree branch prediction pass. */
89cfe6e5 5417
5418static rtx
c2f47e15 5419expand_builtin_expect (tree exp, rtx target)
89cfe6e5 5420{
1e4adcfc 5421 tree arg;
89cfe6e5 5422
c2f47e15 5423 if (call_expr_nargs (exp) < 2)
89cfe6e5 5424 return const0_rtx;
c2f47e15 5425 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 5426
c2f47e15 5427 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 5428 /* When guessing was done, the hints should be already stripped away. */
07311427 5429 gcc_assert (!flag_guess_branch_prob
852f689e 5430 || optimize == 0 || seen_error ());
89cfe6e5 5431 return target;
5432}
689df48e 5433
01107f42 5434/* Expand a call to __builtin_expect_with_probability. We just return our
5435 argument as the builtin_expect semantic should've been already executed by
5436 tree branch prediction pass. */
5437
5438static rtx
5439expand_builtin_expect_with_probability (tree exp, rtx target)
5440{
5441 tree arg;
5442
5443 if (call_expr_nargs (exp) < 3)
5444 return const0_rtx;
5445 arg = CALL_EXPR_ARG (exp, 0);
5446
5447 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5448 /* When guessing was done, the hints should be already stripped away. */
5449 gcc_assert (!flag_guess_branch_prob
5450 || optimize == 0 || seen_error ());
5451 return target;
5452}
5453
5454
fca0886c 5455/* Expand a call to __builtin_assume_aligned. We just return our first
5456 argument as the builtin_assume_aligned semantic should've been already
5457 executed by CCP. */
5458
5459static rtx
5460expand_builtin_assume_aligned (tree exp, rtx target)
5461{
5462 if (call_expr_nargs (exp) < 2)
5463 return const0_rtx;
5464 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5465 EXPAND_NORMAL);
5466 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5467 && (call_expr_nargs (exp) < 3
5468 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5469 return target;
5470}
5471
/* Expand a call to __builtin_trap: emit the target's trap insn if there
   is one, otherwise emit a call to abort.  Always ends with a barrier
   since control does not continue past the trap.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      /* No trap insn on this target: fall back to calling abort ().  */
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
78a74442 5493
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* No code is generated for the builtin itself; the barrier alone
     tells the RTL passes that this point is not reachable.  */
  emit_barrier ();
}
5504
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Wrap the argument in a SAVE_EXPR and store it back into EXP —
     presumably so a later re-expansion of EXP reuses the same value;
     NOTE(review): confirm against expand_abs' fallback path.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5527
c2f47e15 5528/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5529 Return NULL is a normal call should be emitted rather than expanding the
5530 function inline. If convenient, the result should be placed in TARGET.
5531 SUBTARGET may be used as the target for computing the operand. */
5532
5533static rtx
c2f47e15 5534expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5535{
5536 rtx op0, op1;
5537 tree arg;
5538
c2f47e15 5539 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5540 return NULL_RTX;
270436f3 5541
c2f47e15 5542 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5543 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5544
c2f47e15 5545 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5546 op1 = expand_normal (arg);
270436f3 5547
5548 return expand_copysign (op0, op1, target);
5549}
5550
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      /* If the insn fails to match, fall through and expand to
	 nothing — the builtin still returns const0_rtx.  */
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
5600
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary:
     tramp = (tramp + align - 1) & -align, in units of bytes.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5625
/* Expand a call to the init_trampoline builtin; EXP is the CALL_EXPR.
   ONSTACK is true for the on-stack (nested function) variant and false
   for the heap variant.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5683
5684static rtx
c2f47e15 5685expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5686{
5687 rtx tramp;
5688
c2f47e15 5689 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5690 return NULL_RTX;
5691
c2f47e15 5692 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5693 tramp = round_trampoline_addr (tramp);
82c7907c 5694 if (targetm.calls.trampoline_adjust_address)
5695 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5696
5697 return tramp;
5698}
5699
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor: static chain at
     offset 0, code entry one pointer-size further.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
5732
5733/* Expand a call to the builtin descriptor adjustment routine. */
5734
5735static rtx
5736expand_builtin_adjust_descriptor (tree exp)
5737{
5738 rtx tramp;
5739
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5741 return NULL_RTX;
5742
5743 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5744
5745 /* Unalign the descriptor to allow runtime identification. */
5746 tramp = plus_constant (ptr_mode, tramp,
5747 targetm.calls.custom_function_descriptors);
5748
5749 return force_operand (tramp, NULL_RTX);
5750}
5751
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn did not match after all; discard any partially
	 emitted sequence and fall back to the generic expansion.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: view it as an integer of
	 the same size.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: extract just the word containing the sign
	 bit and adjust BITPOS to be relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
73673831 5858
5859/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5860 call. EXP is the call. FN is the
73673831 5861 identificator of the actual function. IGNORE is nonzero if the
5862 value is to be ignored. */
5863
5864static rtx
c2f47e15 5865expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5866{
5867 tree id, decl;
5868 tree call;
5869
5870 /* If we are not profiling, just call the function. */
5871 if (!profile_arc_flag)
5872 return NULL_RTX;
5873
5874 /* Otherwise call the wrapper. This should be equivalent for the rest of
5875 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5876 code necessary for keeping the profiling sane. */
73673831 5877
5878 switch (DECL_FUNCTION_CODE (fn))
5879 {
5880 case BUILT_IN_FORK:
5881 id = get_identifier ("__gcov_fork");
5882 break;
5883
5884 case BUILT_IN_EXECL:
5885 id = get_identifier ("__gcov_execl");
5886 break;
5887
5888 case BUILT_IN_EXECV:
5889 id = get_identifier ("__gcov_execv");
5890 break;
5891
5892 case BUILT_IN_EXECLP:
5893 id = get_identifier ("__gcov_execlp");
5894 break;
5895
5896 case BUILT_IN_EXECLE:
5897 id = get_identifier ("__gcov_execle");
5898 break;
5899
5900 case BUILT_IN_EXECVP:
5901 id = get_identifier ("__gcov_execvp");
5902 break;
5903
5904 case BUILT_IN_EXECVE:
5905 id = get_identifier ("__gcov_execve");
5906 break;
5907
5908 default:
64db345d 5909 gcc_unreachable ();
73673831 5910 }
5911
e60a6f7b 5912 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5913 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5914 DECL_EXTERNAL (decl) = 1;
5915 TREE_PUBLIC (decl) = 1;
5916 DECL_ARTIFICIAL (decl) = 1;
5917 TREE_NOTHROW (decl) = 1;
e82d310b 5918 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5919 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5920 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5921 return expand_call (call, target, ignore);
c2f47e15 5922 }
48e1416a 5923
b6a5fc45 5924
5925\f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
5941
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  MODE is the integer mode of the
   access.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  /* Take the address space from the pointed-to type when LOC is a
     pointer, otherwise from LOC's own type.  */
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5974
1cd6e20d 5975/* Make sure an argument is in the right mode.
5976 EXP is the tree argument.
5977 MODE is the mode it should be in. */
5978
5979static rtx
3754d046 5980expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5981{
5982 rtx val;
3754d046 5983 machine_mode old_mode;
1cd6e20d 5984
5985 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5986 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5987 of CONST_INTs, where we know the old_mode only from the call argument. */
5988
5989 old_mode = GET_MODE (val);
5990 if (old_mode == VOIDmode)
5991 old_mode = TYPE_MODE (TREE_TYPE (exp));
5992 val = convert_modes (mode, old_mode, val, 1);
5993 return val;
5994}
5995
5996
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed semantics in GCC 4.4; under -Wsync-nand,
     mention this once per flavor.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* One-shot latches so each note is emitted at most once per run.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* __sync operations imply full-barrier (SYNC_SEQ_CST) semantics.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
6059
6060/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 6061 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 6062 true if this is the boolean form. TARGET is a place for us to store the
6063 results; this is NOT optional if IS_BOOL is true. */
6064
6065static rtx
3754d046 6066expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 6067 bool is_bool, rtx target)
b6a5fc45 6068{
041e0215 6069 rtx old_val, new_val, mem;
ba885f6a 6070 rtx *pbool, *poval;
b6a5fc45 6071
6072 /* Expand the operands. */
c2f47e15 6073 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 6074 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6075 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 6076
ba885f6a 6077 pbool = poval = NULL;
6078 if (target != const0_rtx)
6079 {
6080 if (is_bool)
6081 pbool = &target;
6082 else
6083 poval = &target;
6084 }
6085 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 6086 false, MEMMODEL_SYNC_SEQ_CST,
6087 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 6088 return NULL_RTX;
c2f47e15 6089
1cd6e20d 6090 return target;
b6a5fc45 6091}
6092
6093/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6094 general form is actually an atomic exchange, and some targets only
6095 support a reduced form with the second argument being a constant 1.
48e1416a 6096 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 6097 the results. */
b6a5fc45 6098
6099static rtx
3754d046 6100expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 6101 rtx target)
b6a5fc45 6102{
041e0215 6103 rtx val, mem;
b6a5fc45 6104
6105 /* Expand the operands. */
c2f47e15 6106 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 6107 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6108
7821cde1 6109 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 6110}
6111
6112/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6113
6114static void
3754d046 6115expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 6116{
6117 rtx mem;
6118
6119 /* Expand the operands. */
6120 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6121
a372f7ca 6122 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 6123}
6124
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Let the target validate/normalize target-specific high bits; otherwise
     any bits outside MEMMODEL_MASK are an unknown architecture specifier.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
6168
6169/* Expand the __atomic_exchange intrinsic:
6170 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6171 EXP is the CALL_EXPR.
6172 TARGET is an optional place for us to store the results. */
6173
6174static rtx
3754d046 6175expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 6176{
6177 rtx val, mem;
6178 enum memmodel model;
6179
6180 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 6181
6182 if (!flag_inline_atomics)
6183 return NULL_RTX;
6184
6185 /* Expand the operands. */
6186 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6187 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6188
7821cde1 6189 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 6190}
6191
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* Diagnose invalid model combinations and degrade to SEQ_CST.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer argument; access the pointed-to slot as a MEM.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6273
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Materialize the expected value in a stack slot so its address can be
     passed to the library routine.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The LHS receives a complex value: the boolean result plus the
	 (possibly updated) expected value from the stack slot.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6323
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in the low byte and the weak flag
     in bit 8 (see the "& 256" test below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Diagnose invalid model combinations and degrade to SEQ_CST.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  /* Without inline atomics, fall back to the library call.  */
  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* The target could not expand this inline; use the library call.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* The LHS, if any, receives a complex value: the success flag and the
     old value read from memory.  */
  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6393
1cd6e20d 6394/* Expand the __atomic_load intrinsic:
6395 TYPE __atomic_load (TYPE *object, enum memmodel)
6396 EXP is the CALL_EXPR.
6397 TARGET is an optional place for us to store the results. */
6398
6399static rtx
3754d046 6400expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 6401{
6402 rtx mem;
6403 enum memmodel model;
6404
6405 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 6406 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 6407 {
2cb724f9 6408 source_location loc
6409 = expansion_point_location_if_in_system_header (input_location);
6410 warning_at (loc, OPT_Winvalid_memory_model,
6411 "invalid memory model for %<__atomic_load%>");
086f4e33 6412 model = MEMMODEL_SEQ_CST;
1cd6e20d 6413 }
6414
6415 if (!flag_inline_atomics)
6416 return NULL_RTX;
6417
6418 /* Expand the operand. */
6419 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6420
6421 return expand_atomic_load (target, mem, model);
6422}
6423
6424
6425/* Expand the __atomic_store intrinsic:
6426 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6427 EXP is the CALL_EXPR.
6428 TARGET is an optional place for us to store the results. */
6429
6430static rtx
3754d046 6431expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 6432{
6433 rtx mem, val;
6434 enum memmodel model;
6435
6436 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 6437 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6438 || is_mm_release (model)))
1cd6e20d 6439 {
2cb724f9 6440 source_location loc
6441 = expansion_point_location_if_in_system_header (input_location);
6442 warning_at (loc, OPT_Winvalid_memory_model,
6443 "invalid memory model for %<__atomic_store%>");
086f4e33 6444 model = MEMMODEL_SEQ_CST;
1cd6e20d 6445 }
6446
6447 if (!flag_inline_atomics)
6448 return NULL_RTX;
6449
6450 /* Expand the operands. */
6451 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6452 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6453
8808bf16 6454 return expand_atomic_store (mem, val, model, false);
1cd6e20d 6455}
6456
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect the
     callee of EXP to the external fallback builtin, restored below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      /* NOT here means NAND: compute ~(ret & val) by hand; other codes
	 apply directly.  */
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6531
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional fourth argument supplies an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the RMW code and the target optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: emit a plain atomic RMW with mask 1 << bit
	 (complemented for the AND/reset case) and discard the result.  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* No direct pattern: fall back to an atomic fetch-op and extract the
     tested bit (or masked value) from the fetched result by hand.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* FLAG == 1: reduce the result to the single tested bit.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6615
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* Acquire-flavored models are invalid here; the diagnostic names
     __atomic_store since the clear is implemented as an atomic store.  */
  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
6650
6651/* Expand an atomic test_and_set operation.
6652 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6653 EXP is the call expression. */
6654
6655static rtx
7821cde1 6656expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6657{
7821cde1 6658 rtx mem;
10b744a3 6659 enum memmodel model;
3754d046 6660 machine_mode mode;
10b744a3 6661
517be012 6662 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6663 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6664 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6665
7821cde1 6666 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6667}
6668
6669
1cd6e20d 6670/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6671 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6672
6673static tree
6674fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6675{
6676 int size;
3754d046 6677 machine_mode mode;
1cd6e20d 6678 unsigned int mode_align, type_align;
6679
6680 if (TREE_CODE (arg0) != INTEGER_CST)
6681 return NULL_TREE;
b6a5fc45 6682
517be012 6683 /* We need a corresponding integer mode for the access to be lock-free. */
1cd6e20d 6684 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
517be012 6685 if (!int_mode_for_size (size, 0).exists (&mode))
6686 return boolean_false_node;
6687
1cd6e20d 6688 mode_align = GET_MODE_ALIGNMENT (mode);
6689
4ca99588 6690 if (TREE_CODE (arg1) == INTEGER_CST)
6691 {
6692 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6693
6694 /* Either this argument is null, or it's a fake pointer encoding
6695 the alignment of the object. */
ac29ece2 6696 val = least_bit_hwi (val);
4ca99588 6697 val *= BITS_PER_UNIT;
6698
6699 if (val == 0 || mode_align < val)
6700 type_align = mode_align;
6701 else
6702 type_align = val;
6703 }
1cd6e20d 6704 else
6705 {
6706 tree ttype = TREE_TYPE (arg1);
6707
6708 /* This function is usually invoked and folded immediately by the front
6709 end before anything else has a chance to look at it. The pointer
6710 parameter at this point is usually cast to a void *, so check for that
6711 and look past the cast. */
2f8a2ead 6712 if (CONVERT_EXPR_P (arg1)
6713 && POINTER_TYPE_P (ttype)
6714 && VOID_TYPE_P (TREE_TYPE (ttype))
6715 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1cd6e20d 6716 arg1 = TREE_OPERAND (arg1, 0);
6717
6718 ttype = TREE_TYPE (arg1);
6719 gcc_assert (POINTER_TYPE_P (ttype));
6720
6721 /* Get the underlying type of the object. */
6722 ttype = TREE_TYPE (ttype);
6723 type_align = TYPE_ALIGN (ttype);
6724 }
6725
47ae02b7 6726 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 6727 be used. */
6728 if (type_align < mode_align)
06308d2a 6729 return boolean_false_node;
1cd6e20d 6730
6731 /* Check if a compare_and_swap pattern exists for the mode which represents
6732 the required size. The pattern is not allowed to fail, so the existence
d5f5fa27 6733 of the pattern indicates support is present. Also require that an
6734 atomic load exists for the required size. */
6735 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
06308d2a 6736 return boolean_true_node;
1cd6e20d 6737 else
06308d2a 6738 return boolean_false_node;
1cd6e20d 6739}
6740
6741/* Return true if the parameters to call EXP represent an object which will
6742 always generate lock free instructions. The first argument represents the
6743 size of the object, and the second parameter is a pointer to the object
6744 itself. If NULL is passed for the object, then the result is based on
6745 typical alignment for an object of the specified size. Otherwise return
6746 false. */
6747
6748static rtx
6749expand_builtin_atomic_always_lock_free (tree exp)
6750{
6751 tree size;
6752 tree arg0 = CALL_EXPR_ARG (exp, 0);
6753 tree arg1 = CALL_EXPR_ARG (exp, 1);
6754
6755 if (TREE_CODE (arg0) != INTEGER_CST)
6756 {
6757 error ("non-constant argument 1 to __atomic_always_lock_free");
6758 return const0_rtx;
6759 }
6760
6761 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6762 if (size == boolean_true_node)
1cd6e20d 6763 return const1_rtx;
6764 return const0_rtx;
6765}
6766
6767/* Return a one or zero if it can be determined that object ARG1 of size ARG
6768 is lock free on this architecture. */
6769
6770static tree
6771fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6772{
6773 if (!flag_inline_atomics)
6774 return NULL_TREE;
6775
6776 /* If it isn't always lock free, don't generate a result. */
06308d2a 6777 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6778 return boolean_true_node;
1cd6e20d 6779
6780 return NULL_TREE;
6781}
6782
6783/* Return true if the parameters to call EXP represent an object which will
6784 always generate lock free instructions. The first argument represents the
6785 size of the object, and the second parameter is a pointer to the object
6786 itself. If NULL is passed for the object, then the result is based on
6787 typical alignment for an object of the specified size. Otherwise return
6788 NULL*/
6789
6790static rtx
6791expand_builtin_atomic_is_lock_free (tree exp)
6792{
6793 tree size;
6794 tree arg0 = CALL_EXPR_ARG (exp, 0);
6795 tree arg1 = CALL_EXPR_ARG (exp, 1);
6796
6797 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6798 {
6799 error ("non-integer argument 1 to __atomic_is_lock_free");
6800 return NULL_RTX;
6801 }
6802
6803 if (!flag_inline_atomics)
6804 return NULL_RTX;
6805
6806 /* If the value is known at compile time, return the RTX for it. */
6807 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6808 if (size == boolean_true_node)
1cd6e20d 6809 return const1_rtx;
6810
6811 return NULL_RTX;
6812}
6813
1cd6e20d 6814/* Expand the __atomic_thread_fence intrinsic:
6815 void __atomic_thread_fence (enum memmodel)
6816 EXP is the CALL_EXPR. */
6817
6818static void
6819expand_builtin_atomic_thread_fence (tree exp)
6820{
fe54c06b 6821 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6822 expand_mem_thread_fence (model);
1cd6e20d 6823}
6824
6825/* Expand the __atomic_signal_fence intrinsic:
6826 void __atomic_signal_fence (enum memmodel)
6827 EXP is the CALL_EXPR. */
6828
6829static void
6830expand_builtin_atomic_signal_fence (tree exp)
6831{
fe54c06b 6832 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6833 expand_mem_signal_fence (model);
b6a5fc45 6834}
6835
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  /* __sync_synchronize is a full barrier; use the SYNC seq-cst model.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6843
/* Expand a call to __builtin_thread_pointer.  Return the thread pointer
   in TARGET (or a fresh Pmode register) via the get_thread_pointer
   optab, or report an error if the target provides no such optab.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  /* The builtin takes no arguments.  */
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
6866
6867static void
6868expand_builtin_set_thread_pointer (tree exp)
6869{
6870 enum insn_code icode;
6871 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6872 return;
6873 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6874 if (icode != CODE_FOR_nothing)
6875 {
6876 struct expand_operand op;
6877 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6878 Pmode, EXPAND_NORMAL);
6f343c10 6879 create_input_operand (&op, val, Pmode);
badaa04c 6880 expand_insn (icode, 1, &op);
6881 return;
6882 }
6883 error ("__builtin_set_thread_pointer is not supported on this target");
6884}
6885
53800dbe 6886\f
0e80b01d 6887/* Emit code to restore the current value of stack. */
6888
6889static void
6890expand_stack_restore (tree var)
6891{
1e0c0b35 6892 rtx_insn *prev;
6893 rtx sa = expand_normal (var);
0e80b01d 6894
6895 sa = convert_memory_address (Pmode, sa);
6896
6897 prev = get_last_insn ();
6898 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6899
6900 record_new_stack_level ();
6901
0e80b01d 6902 fixup_args_size_notes (prev, get_last_insn (), 0);
6903}
6904
0e80b01d 6905/* Emit code to save the current value of stack. */
6906
6907static rtx
6908expand_stack_save (void)
6909{
6910 rtx ret = NULL_RTX;
6911
0e80b01d 6912 emit_stack_save (SAVE_BLOCK, &ret);
6913 return ret;
6914}
6915
a7babc1e 6916/* Emit code to get the openacc gang, worker or vector id or size. */
6917
6918static rtx
6919expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6920{
6921 const char *name;
6922 rtx fallback_retval;
6923 rtx_insn *(*gen_fn) (rtx, rtx);
6924 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6925 {
6926 case BUILT_IN_GOACC_PARLEVEL_ID:
6927 name = "__builtin_goacc_parlevel_id";
6928 fallback_retval = const0_rtx;
6929 gen_fn = targetm.gen_oacc_dim_pos;
6930 break;
6931 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6932 name = "__builtin_goacc_parlevel_size";
6933 fallback_retval = const1_rtx;
6934 gen_fn = targetm.gen_oacc_dim_size;
6935 break;
6936 default:
6937 gcc_unreachable ();
6938 }
6939
6940 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6941 {
6942 error ("%qs only supported in OpenACC code", name);
6943 return const0_rtx;
6944 }
6945
6946 tree arg = CALL_EXPR_ARG (exp, 0);
6947 if (TREE_CODE (arg) != INTEGER_CST)
6948 {
6949 error ("non-constant argument 0 to %qs", name);
6950 return const0_rtx;
6951 }
6952
6953 int dim = TREE_INT_CST_LOW (arg);
6954 switch (dim)
6955 {
6956 case GOMP_DIM_GANG:
6957 case GOMP_DIM_WORKER:
6958 case GOMP_DIM_VECTOR:
6959 break;
6960 default:
6961 error ("illegal argument 0 to %qs", name);
6962 return const0_rtx;
6963 }
6964
6965 if (ignore)
6966 return target;
6967
2b895374 6968 if (target == NULL_RTX)
6969 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6970
a7babc1e 6971 if (!targetm.have_oacc_dim_size ())
6972 {
6973 emit_move_insn (target, fallback_retval);
6974 return target;
6975 }
6976
6977 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6978 emit_insn (gen_fn (reg, GEN_INT (dim)));
6979 if (reg != target)
6980 emit_move_insn (target, reg);
6981
6982 return target;
6983}
ca4c3545 6984
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string
   (1 for the first argument, 2 for the second);
   MODE is the mode of the result (the mode of the call's int type).

   The generated code is equivalent to the following, assuming
   const_str_n is 2, i.e., arg2 is a constant string:

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  /* Memory reference covering the LENGTH bytes read from VAR_STR.  */
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  /* Comparisons are done byte by byte as unsigned char, matching the
     C semantics of strcmp/memcmp.  */
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  /* Buffer all emitted insns in a sequence, then emit them at once.  */
  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load byte I of the variable string and of the constant
	 string.  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* Keep the operands in original argument order so the sign of
	 the difference matches the library semantics.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to MODE before subtracting.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* After every byte but the last, branch out early on a
	 nonzero difference.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
7054
b3e6ae76 7055/* Inline expansion a call to str(n)cmp, with result going to
a950155e 7056 TARGET if that's convenient.
7057 If the call is not been inlined, return NULL_RTX. */
7058static rtx
0dbefa15 7059inline_expand_builtin_string_cmp (tree exp, rtx target)
a950155e 7060{
7061 tree fndecl = get_callee_fndecl (exp);
7062 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7063 unsigned HOST_WIDE_INT length = 0;
7064 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7065
9c7661c8 7066 /* Do NOT apply this inlining expansion when optimizing for size or
7067 optimization level below 2. */
7068 if (optimize < 2 || optimize_insn_for_size_p ())
7069 return NULL_RTX;
7070
a950155e 7071 gcc_checking_assert (fcode == BUILT_IN_STRCMP
b3e6ae76 7072 || fcode == BUILT_IN_STRNCMP
a950155e 7073 || fcode == BUILT_IN_MEMCMP);
7074
0dbefa15 7075 /* On a target where the type of the call (int) has same or narrower presicion
7076 than unsigned char, give up the inlining expansion. */
7077 if (TYPE_PRECISION (unsigned_char_type_node)
7078 >= TYPE_PRECISION (TREE_TYPE (exp)))
7079 return NULL_RTX;
7080
a950155e 7081 tree arg1 = CALL_EXPR_ARG (exp, 0);
7082 tree arg2 = CALL_EXPR_ARG (exp, 1);
7083 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7084
7085 unsigned HOST_WIDE_INT len1 = 0;
7086 unsigned HOST_WIDE_INT len2 = 0;
7087 unsigned HOST_WIDE_INT len3 = 0;
7088
7089 const char *src_str1 = c_getstr (arg1, &len1);
7090 const char *src_str2 = c_getstr (arg2, &len2);
b3e6ae76 7091
a950155e 7092 /* If neither strings is constant string, the call is not qualify. */
7093 if (!src_str1 && !src_str2)
7094 return NULL_RTX;
7095
7096 /* For strncmp, if the length is not a const, not qualify. */
7097 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
7098 return NULL_RTX;
7099
7100 int const_str_n = 0;
7101 if (!len1)
7102 const_str_n = 2;
7103 else if (!len2)
7104 const_str_n = 1;
7105 else if (len2 > len1)
7106 const_str_n = 1;
7107 else
7108 const_str_n = 2;
7109
7110 gcc_checking_assert (const_str_n > 0);
7111 length = (const_str_n == 1) ? len1 : len2;
7112
7113 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
7114 length = len3;
7115
b3e6ae76 7116 /* If the length of the comparision is larger than the threshold,
a950155e 7117 do nothing. */
b3e6ae76 7118 if (length > (unsigned HOST_WIDE_INT)
a950155e 7119 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7120 return NULL_RTX;
7121
7122 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7123
7124 /* Now, start inline expansion the call. */
b3e6ae76 7125 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
a950155e 7126 (const_str_n == 1) ? src_str1 : src_str2, length,
0dbefa15 7127 const_str_n, mode);
a950155e 7128}
7129
123081ef 7130/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7131 represents the size of the first argument to that call, or VOIDmode
7132 if the argument is a pointer. IGNORE will be true if the result
7133 isn't used. */
7134static rtx
7135expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7136 bool ignore)
7137{
7138 rtx val, failsafe;
7139 unsigned nargs = call_expr_nargs (exp);
7140
7141 tree arg0 = CALL_EXPR_ARG (exp, 0);
7142
7143 if (mode == VOIDmode)
7144 {
7145 mode = TYPE_MODE (TREE_TYPE (arg0));
7146 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7147 }
7148
7149 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7150
7151 /* An optional second argument can be used as a failsafe value on
7152 some machines. If it isn't present, then the failsafe value is
7153 assumed to be 0. */
7154 if (nargs > 1)
7155 {
7156 tree arg1 = CALL_EXPR_ARG (exp, 1);
7157 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7158 }
7159 else
7160 failsafe = const0_rtx;
7161
7162 /* If the result isn't used, the behavior is undefined. It would be
7163 nice to emit a warning here, but path splitting means this might
7164 happen with legitimate code. So simply drop the builtin
7165 expansion in that case; we've handled any side-effects above. */
7166 if (ignore)
7167 return const0_rtx;
7168
7169 /* If we don't have a suitable target, create one to hold the result. */
7170 if (target == NULL || GET_MODE (target) != mode)
7171 target = gen_reg_rtx (mode);
7172
7173 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7174 val = convert_modes (mode, VOIDmode, val, false);
7175
7176 return targetm.speculation_safe_value (mode, target, val, failsafe);
7177}
7178
53800dbe 7179/* Expand an expression EXP that calls a built-in function,
7180 with result going to TARGET if that's convenient
7181 (and in mode MODE if that's convenient).
7182 SUBTARGET may be used as the target for computing one of EXP's operands.
7183 IGNORE is nonzero if the value is to be ignored. */
7184
7185rtx
3754d046 7186expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 7187 int ignore)
53800dbe 7188{
c6e6ecb1 7189 tree fndecl = get_callee_fndecl (exp);
53800dbe 7190 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 7191 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 7192 int flags;
53800dbe 7193
4e2f4ed5 7194 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7195 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7196
f9acf11a 7197 /* When ASan is enabled, we don't want to expand some memory/string
7198 builtins and rely on libsanitizer's hooks. This allows us to avoid
7199 redundant checks and be sure, that possible overflow will be detected
7200 by ASan. */
7201
7202 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7203 return expand_call (exp, target, ignore);
7204
53800dbe 7205 /* When not optimizing, generate calls to library functions for a certain
7206 set of builtins. */
cd9ff771 7207 if (!optimize
b6a5fc45 7208 && !called_as_built_in (fndecl)
73037a1e 7209 && fcode != BUILT_IN_FORK
7210 && fcode != BUILT_IN_EXECL
7211 && fcode != BUILT_IN_EXECV
7212 && fcode != BUILT_IN_EXECLP
7213 && fcode != BUILT_IN_EXECLE
7214 && fcode != BUILT_IN_EXECVP
7215 && fcode != BUILT_IN_EXECVE
2b34677f 7216 && !ALLOCA_FUNCTION_CODE_P (fcode)
1e42d5c6 7217 && fcode != BUILT_IN_FREE)
cd9ff771 7218 return expand_call (exp, target, ignore);
53800dbe 7219
8d6d7930 7220 /* The built-in function expanders test for target == const0_rtx
7221 to determine whether the function's result will be ignored. */
7222 if (ignore)
7223 target = const0_rtx;
7224
7225 /* If the result of a pure or const built-in function is ignored, and
7226 none of its arguments are volatile, we can avoid expanding the
7227 built-in call and just evaluate the arguments for side-effects. */
7228 if (target == const0_rtx
67fa4078 7229 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7230 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 7231 {
7232 bool volatilep = false;
7233 tree arg;
c2f47e15 7234 call_expr_arg_iterator iter;
8d6d7930 7235
c2f47e15 7236 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7237 if (TREE_THIS_VOLATILE (arg))
8d6d7930 7238 {
7239 volatilep = true;
7240 break;
7241 }
7242
7243 if (! volatilep)
7244 {
c2f47e15 7245 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7246 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 7247 return const0_rtx;
7248 }
7249 }
7250
53800dbe 7251 switch (fcode)
7252 {
4f35b1fc 7253 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 7254 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 7255 case BUILT_IN_FABSD32:
7256 case BUILT_IN_FABSD64:
7257 case BUILT_IN_FABSD128:
c2f47e15 7258 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 7259 if (target)
a0c938f0 7260 return target;
78a74442 7261 break;
7262
4f35b1fc 7263 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 7264 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 7265 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 7266 if (target)
7267 return target;
7268 break;
7269
7d3f6cc7 7270 /* Just do a normal library call if we were unable to fold
7271 the values. */
4f35b1fc 7272 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 7273 break;
53800dbe 7274
7e0713b1 7275 CASE_FLT_FN (BUILT_IN_FMA):
8c32188e 7276 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7e0713b1 7277 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7278 if (target)
7279 return target;
7280 break;
7281
a67a90e5 7282 CASE_FLT_FN (BUILT_IN_ILOGB):
7283 if (! flag_unsafe_math_optimizations)
7284 break;
12f08300 7285 gcc_fallthrough ();
7286 CASE_FLT_FN (BUILT_IN_ISINF):
7287 CASE_FLT_FN (BUILT_IN_FINITE):
7288 case BUILT_IN_ISFINITE:
7289 case BUILT_IN_ISNORMAL:
f97eea22 7290 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 7291 if (target)
7292 return target;
7293 break;
7294
80ff6494 7295 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 7296 CASE_FLT_FN (BUILT_IN_LCEIL):
7297 CASE_FLT_FN (BUILT_IN_LLCEIL):
7298 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 7299 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 7300 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 7301 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 7302 if (target)
7303 return target;
7304 break;
7305
80ff6494 7306 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 7307 CASE_FLT_FN (BUILT_IN_LRINT):
7308 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 7309 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 7310 CASE_FLT_FN (BUILT_IN_LROUND):
7311 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 7312 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 7313 if (target)
7314 return target;
7315 break;
7316
4f35b1fc 7317 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 7318 target = expand_builtin_powi (exp, target);
757c219d 7319 if (target)
7320 return target;
7321 break;
7322
d735c391 7323 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 7324 target = expand_builtin_cexpi (exp, target);
d735c391 7325 gcc_assert (target);
7326 return target;
7327
4f35b1fc 7328 CASE_FLT_FN (BUILT_IN_SIN):
7329 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 7330 if (! flag_unsafe_math_optimizations)
7331 break;
7332 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7333 if (target)
7334 return target;
7335 break;
7336
c3147c1a 7337 CASE_FLT_FN (BUILT_IN_SINCOS):
7338 if (! flag_unsafe_math_optimizations)
7339 break;
7340 target = expand_builtin_sincos (exp);
7341 if (target)
7342 return target;
7343 break;
7344
53800dbe 7345 case BUILT_IN_APPLY_ARGS:
7346 return expand_builtin_apply_args ();
7347
7348 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7349 FUNCTION with a copy of the parameters described by
7350 ARGUMENTS, and ARGSIZE. It returns a block of memory
7351 allocated on the stack into which is stored all the registers
7352 that might possibly be used for returning the result of a
7353 function. ARGUMENTS is the value returned by
7354 __builtin_apply_args. ARGSIZE is the number of bytes of
7355 arguments that must be copied. ??? How should this value be
7356 computed? We'll also need a safe worst case value for varargs
7357 functions. */
7358 case BUILT_IN_APPLY:
c2f47e15 7359 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 7360 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 7361 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 7362 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7363 return const0_rtx;
7364 else
7365 {
53800dbe 7366 rtx ops[3];
7367
c2f47e15 7368 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7369 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7370 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 7371
7372 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7373 }
7374
7375 /* __builtin_return (RESULT) causes the function to return the
7376 value described by RESULT. RESULT is address of the block of
7377 memory returned by __builtin_apply. */
7378 case BUILT_IN_RETURN:
c2f47e15 7379 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7380 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 7381 return const0_rtx;
7382
7383 case BUILT_IN_SAVEREGS:
a66c9326 7384 return expand_builtin_saveregs ();
53800dbe 7385
48dc2227 7386 case BUILT_IN_VA_ARG_PACK:
7387 /* All valid uses of __builtin_va_arg_pack () are removed during
7388 inlining. */
b8c23db3 7389 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 7390 return const0_rtx;
7391
4e1d7ea4 7392 case BUILT_IN_VA_ARG_PACK_LEN:
7393 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7394 inlining. */
b8c23db3 7395 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 7396 return const0_rtx;
7397
53800dbe 7398 /* Return the address of the first anonymous stack arg. */
7399 case BUILT_IN_NEXT_ARG:
c2f47e15 7400 if (fold_builtin_next_arg (exp, false))
a0c938f0 7401 return const0_rtx;
79012a9d 7402 return expand_builtin_next_arg ();
53800dbe 7403
ac8fb6db 7404 case BUILT_IN_CLEAR_CACHE:
7405 target = expand_builtin___clear_cache (exp);
7406 if (target)
7407 return target;
7408 break;
7409
53800dbe 7410 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 7411 return expand_builtin_classify_type (exp);
53800dbe 7412
7413 case BUILT_IN_CONSTANT_P:
4ee9c684 7414 return const0_rtx;
53800dbe 7415
7416 case BUILT_IN_FRAME_ADDRESS:
7417 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 7418 return expand_builtin_frame_address (fndecl, exp);
53800dbe 7419
7420 /* Returns the address of the area where the structure is returned.
7421 0 otherwise. */
7422 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 7423 if (call_expr_nargs (exp) != 0
9342ee68 7424 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 7425 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 7426 return const0_rtx;
53800dbe 7427 else
9342ee68 7428 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 7429
2b34677f 7430 CASE_BUILT_IN_ALLOCA:
2b29cc6a 7431 target = expand_builtin_alloca (exp);
53800dbe 7432 if (target)
7433 return target;
7434 break;
7435
d08919a7 7436 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7437 return expand_asan_emit_allocas_unpoison (exp);
7438
4ee9c684 7439 case BUILT_IN_STACK_SAVE:
7440 return expand_stack_save ();
7441
7442 case BUILT_IN_STACK_RESTORE:
c2f47e15 7443 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 7444 return const0_rtx;
7445
74bdbe96 7446 case BUILT_IN_BSWAP16:
42791117 7447 case BUILT_IN_BSWAP32:
7448 case BUILT_IN_BSWAP64:
74bdbe96 7449 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 7450 if (target)
7451 return target;
7452 break;
7453
4f35b1fc 7454 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 7455 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7456 subtarget, ffs_optab);
6a08d0ab 7457 if (target)
7458 return target;
7459 break;
7460
4f35b1fc 7461 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 7462 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7463 subtarget, clz_optab);
6a08d0ab 7464 if (target)
7465 return target;
7466 break;
7467
4f35b1fc 7468 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 7469 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7470 subtarget, ctz_optab);
6a08d0ab 7471 if (target)
7472 return target;
7473 break;
7474
d8492bd3 7475 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 7476 target = expand_builtin_unop (target_mode, exp, target,
7477 subtarget, clrsb_optab);
7478 if (target)
7479 return target;
7480 break;
7481
4f35b1fc 7482 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 7483 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7484 subtarget, popcount_optab);
6a08d0ab 7485 if (target)
7486 return target;
7487 break;
7488
4f35b1fc 7489 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 7490 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 7491 subtarget, parity_optab);
53800dbe 7492 if (target)
7493 return target;
7494 break;
7495
7496 case BUILT_IN_STRLEN:
c2f47e15 7497 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 7498 if (target)
7499 return target;
7500 break;
7501
864bd5de 7502 case BUILT_IN_STRNLEN:
7503 target = expand_builtin_strnlen (exp, target, target_mode);
7504 if (target)
7505 return target;
7506 break;
7507
5aef8938 7508 case BUILT_IN_STRCAT:
7509 target = expand_builtin_strcat (exp, target);
7510 if (target)
7511 return target;
7512 break;
7513
53800dbe 7514 case BUILT_IN_STRCPY:
a65c4d64 7515 target = expand_builtin_strcpy (exp, target);
53800dbe 7516 if (target)
7517 return target;
7518 break;
bf8e3599 7519
5aef8938 7520 case BUILT_IN_STRNCAT:
7521 target = expand_builtin_strncat (exp, target);
7522 if (target)
7523 return target;
7524 break;
7525
ed09096d 7526 case BUILT_IN_STRNCPY:
a65c4d64 7527 target = expand_builtin_strncpy (exp, target);
ed09096d 7528 if (target)
7529 return target;
7530 break;
bf8e3599 7531
3b824fa6 7532 case BUILT_IN_STPCPY:
dc369150 7533 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 7534 if (target)
7535 return target;
7536 break;
7537
4d317237 7538 case BUILT_IN_STPNCPY:
7539 target = expand_builtin_stpncpy (exp, target);
7540 if (target)
7541 return target;
7542 break;
7543
8d6c6ef5 7544 case BUILT_IN_MEMCHR:
7545 target = expand_builtin_memchr (exp, target);
7546 if (target)
7547 return target;
7548 break;
7549
53800dbe 7550 case BUILT_IN_MEMCPY:
a65c4d64 7551 target = expand_builtin_memcpy (exp, target);
3b824fa6 7552 if (target)
7553 return target;
7554 break;
7555
4d317237 7556 case BUILT_IN_MEMMOVE:
7557 target = expand_builtin_memmove (exp, target);
7558 if (target)
7559 return target;
7560 break;
7561
3b824fa6 7562 case BUILT_IN_MEMPCPY:
d0fbba1a 7563 target = expand_builtin_mempcpy (exp, target);
53800dbe 7564 if (target)
7565 return target;
7566 break;
7567
7568 case BUILT_IN_MEMSET:
c2f47e15 7569 target = expand_builtin_memset (exp, target, mode);
53800dbe 7570 if (target)
7571 return target;
7572 break;
7573
ffc83088 7574 case BUILT_IN_BZERO:
0b25db21 7575 target = expand_builtin_bzero (exp);
ffc83088 7576 if (target)
7577 return target;
7578 break;
7579
b3e6ae76 7580 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
72dbc21d 7581 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7582 when changing it to a strcmp call. */
7583 case BUILT_IN_STRCMP_EQ:
7584 target = expand_builtin_memcmp (exp, target, true);
7585 if (target)
7586 return target;
7587
7588 /* Change this call back to a BUILT_IN_STRCMP. */
b3e6ae76 7589 TREE_OPERAND (exp, 1)
72dbc21d 7590 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7591
7592 /* Delete the last parameter. */
7593 unsigned int i;
7594 vec<tree, va_gc> *arg_vec;
7595 vec_alloc (arg_vec, 2);
7596 for (i = 0; i < 2; i++)
7597 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7598 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7599 /* FALLTHROUGH */
7600
53800dbe 7601 case BUILT_IN_STRCMP:
a65c4d64 7602 target = expand_builtin_strcmp (exp, target);
53800dbe 7603 if (target)
7604 return target;
7605 break;
7606
72dbc21d 7607 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7608 back to a BUILT_IN_STRNCMP. */
7609 case BUILT_IN_STRNCMP_EQ:
7610 target = expand_builtin_memcmp (exp, target, true);
7611 if (target)
7612 return target;
7613
7614 /* Change it back to a BUILT_IN_STRNCMP. */
b3e6ae76 7615 TREE_OPERAND (exp, 1)
72dbc21d 7616 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7617 /* FALLTHROUGH */
7618
ed09096d 7619 case BUILT_IN_STRNCMP:
7620 target = expand_builtin_strncmp (exp, target, mode);
7621 if (target)
7622 return target;
7623 break;
7624
071f1696 7625 case BUILT_IN_BCMP:
53800dbe 7626 case BUILT_IN_MEMCMP:
3e346f54 7627 case BUILT_IN_MEMCMP_EQ:
7628 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 7629 if (target)
7630 return target;
3e346f54 7631 if (fcode == BUILT_IN_MEMCMP_EQ)
7632 {
7633 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7634 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7635 }
53800dbe 7636 break;
53800dbe 7637
7638 case BUILT_IN_SETJMP:
12f08300 7639 /* This should have been lowered to the builtins below. */
2c8a1497 7640 gcc_unreachable ();
7641
7642 case BUILT_IN_SETJMP_SETUP:
7643 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7644 and the receiver label. */
c2f47e15 7645 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 7646 {
c2f47e15 7647 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 7648 VOIDmode, EXPAND_NORMAL);
c2f47e15 7649 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 7650 rtx_insn *label_r = label_rtx (label);
2c8a1497 7651
7652 /* This is copied from the handling of non-local gotos. */
7653 expand_builtin_setjmp_setup (buf_addr, label_r);
7654 nonlocal_goto_handler_labels
a4de1c23 7655 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 7656 nonlocal_goto_handler_labels);
7657 /* ??? Do not let expand_label treat us as such since we would
7658 not want to be both on the list of non-local labels and on
7659 the list of forced labels. */
7660 FORCED_LABEL (label) = 0;
7661 return const0_rtx;
7662 }
7663 break;
7664
2c8a1497 7665 case BUILT_IN_SETJMP_RECEIVER:
7666 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 7667 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 7668 {
c2f47e15 7669 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 7670 rtx_insn *label_r = label_rtx (label);
2c8a1497 7671
7672 expand_builtin_setjmp_receiver (label_r);
7673 return const0_rtx;
7674 }
6b7f6858 7675 break;
53800dbe 7676
7677 /* __builtin_longjmp is passed a pointer to an array of five words.
7678 It's similar to the C library longjmp function but works with
7679 __builtin_setjmp above. */
7680 case BUILT_IN_LONGJMP:
c2f47e15 7681 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7682 {
c2f47e15 7683 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 7684 VOIDmode, EXPAND_NORMAL);
c2f47e15 7685 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 7686
7687 if (value != const1_rtx)
7688 {
1e5fcbe2 7689 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 7690 return const0_rtx;
7691 }
7692
7693 expand_builtin_longjmp (buf_addr, value);
7694 return const0_rtx;
7695 }
2c8a1497 7696 break;
53800dbe 7697
4ee9c684 7698 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 7699 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 7700 if (target)
7701 return target;
7702 break;
7703
843d08a9 7704 /* This updates the setjmp buffer that is its argument with the value
7705 of the current stack pointer. */
7706 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 7707 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 7708 {
7709 rtx buf_addr
c2f47e15 7710 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 7711
7712 expand_builtin_update_setjmp_buf (buf_addr);
7713 return const0_rtx;
7714 }
7715 break;
7716
53800dbe 7717 case BUILT_IN_TRAP:
a0ef1725 7718 expand_builtin_trap ();
53800dbe 7719 return const0_rtx;
7720
d2b48f0c 7721 case BUILT_IN_UNREACHABLE:
7722 expand_builtin_unreachable ();
7723 return const0_rtx;
7724
4f35b1fc 7725 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 7726 case BUILT_IN_SIGNBITD32:
7727 case BUILT_IN_SIGNBITD64:
7728 case BUILT_IN_SIGNBITD128:
27f261ef 7729 target = expand_builtin_signbit (exp, target);
7730 if (target)
7731 return target;
7732 break;
7733
53800dbe 7734 /* Various hooks for the DWARF 2 __throw routine. */
7735 case BUILT_IN_UNWIND_INIT:
7736 expand_builtin_unwind_init ();
7737 return const0_rtx;
7738 case BUILT_IN_DWARF_CFA:
7739 return virtual_cfa_rtx;
7740#ifdef DWARF2_UNWIND_INFO
f8f023a5 7741 case BUILT_IN_DWARF_SP_COLUMN:
7742 return expand_builtin_dwarf_sp_column ();
695e919b 7743 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 7744 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 7745 return const0_rtx;
53800dbe 7746#endif
7747 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 7748 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7749 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 7750 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7751 case BUILT_IN_EH_RETURN:
c2f47e15 7752 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7753 CALL_EXPR_ARG (exp, 1));
53800dbe 7754 return const0_rtx;
df4b504c 7755 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 7756 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7757 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7758 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7759 case BUILT_IN_EH_POINTER:
7760 return expand_builtin_eh_pointer (exp);
7761 case BUILT_IN_EH_FILTER:
7762 return expand_builtin_eh_filter (exp);
7763 case BUILT_IN_EH_COPY_VALUES:
7764 return expand_builtin_eh_copy_values (exp);
26093bf4 7765
7ccc713a 7766 case BUILT_IN_VA_START:
c2f47e15 7767 return expand_builtin_va_start (exp);
a66c9326 7768 case BUILT_IN_VA_END:
c2f47e15 7769 return expand_builtin_va_end (exp);
a66c9326 7770 case BUILT_IN_VA_COPY:
c2f47e15 7771 return expand_builtin_va_copy (exp);
89cfe6e5 7772 case BUILT_IN_EXPECT:
c2f47e15 7773 return expand_builtin_expect (exp, target);
01107f42 7774 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7775 return expand_builtin_expect_with_probability (exp, target);
fca0886c 7776 case BUILT_IN_ASSUME_ALIGNED:
7777 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7778 case BUILT_IN_PREFETCH:
c2f47e15 7779 expand_builtin_prefetch (exp);
5e3608d8 7780 return const0_rtx;
7781
4ee9c684 7782 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7783 return expand_builtin_init_trampoline (exp, true);
7784 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7785 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7786 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7787 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7788
a27e3913 7789 case BUILT_IN_INIT_DESCRIPTOR:
7790 return expand_builtin_init_descriptor (exp);
7791 case BUILT_IN_ADJUST_DESCRIPTOR:
7792 return expand_builtin_adjust_descriptor (exp);
7793
73673831 7794 case BUILT_IN_FORK:
7795 case BUILT_IN_EXECL:
7796 case BUILT_IN_EXECV:
7797 case BUILT_IN_EXECLP:
7798 case BUILT_IN_EXECLE:
7799 case BUILT_IN_EXECVP:
7800 case BUILT_IN_EXECVE:
c2f47e15 7801 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7802 if (target)
7803 return target;
7804 break;
53800dbe 7805
2797f13a 7806 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7807 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7808 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7809 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7810 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7811 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7812 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7813 if (target)
7814 return target;
7815 break;
7816
2797f13a 7817 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7818 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7819 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7820 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7821 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7822 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7823 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7824 if (target)
7825 return target;
7826 break;
7827
2797f13a 7828 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7829 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7830 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7831 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7832 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7833 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7834 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7835 if (target)
7836 return target;
7837 break;
7838
2797f13a 7839 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7840 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7841 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7842 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7843 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7844 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7845 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7846 if (target)
7847 return target;
7848 break;
7849
2797f13a 7850 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7851 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7852 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7853 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7854 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7855 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7856 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7857 if (target)
7858 return target;
7859 break;
7860
2797f13a 7861 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7862 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7863 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7864 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7865 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7866 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7867 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7868 if (target)
7869 return target;
7870 break;
7871
2797f13a 7872 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7873 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7874 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7875 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7876 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7877 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7878 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7879 if (target)
7880 return target;
7881 break;
7882
2797f13a 7883 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7884 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7885 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7886 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7887 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7888 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7889 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7890 if (target)
7891 return target;
7892 break;
7893
2797f13a 7894 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7895 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7896 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7897 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7898 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7899 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7900 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7901 if (target)
7902 return target;
7903 break;
7904
2797f13a 7905 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7906 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7907 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7908 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7909 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7910 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7911 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7912 if (target)
7913 return target;
7914 break;
7915
2797f13a 7916 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7917 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7918 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7919 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7920 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7921 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7922 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7923 if (target)
7924 return target;
7925 break;
7926
2797f13a 7927 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7928 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7929 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7930 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7931 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7932 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7933 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7934 if (target)
7935 return target;
7936 break;
7937
2797f13a 7938 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7939 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7940 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7941 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7942 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7943 if (mode == VOIDmode)
7944 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7945 if (!target || !register_operand (target, mode))
7946 target = gen_reg_rtx (mode);
3e272de8 7947
2797f13a 7948 mode = get_builtin_sync_mode
7949 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7950 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7951 if (target)
7952 return target;
7953 break;
7954
2797f13a 7955 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7956 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7957 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7958 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7959 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7960 mode = get_builtin_sync_mode
7961 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7962 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7963 if (target)
7964 return target;
7965 break;
7966
2797f13a 7967 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7968 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7969 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7970 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7971 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7972 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7973 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7974 if (target)
7975 return target;
7976 break;
7977
2797f13a 7978 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7979 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7980 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7981 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7982 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7983 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7984 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 7985 return const0_rtx;
7986
2797f13a 7987 case BUILT_IN_SYNC_SYNCHRONIZE:
7988 expand_builtin_sync_synchronize ();
b6a5fc45 7989 return const0_rtx;
7990
1cd6e20d 7991 case BUILT_IN_ATOMIC_EXCHANGE_1:
7992 case BUILT_IN_ATOMIC_EXCHANGE_2:
7993 case BUILT_IN_ATOMIC_EXCHANGE_4:
7994 case BUILT_IN_ATOMIC_EXCHANGE_8:
7995 case BUILT_IN_ATOMIC_EXCHANGE_16:
7996 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7997 target = expand_builtin_atomic_exchange (mode, exp, target);
7998 if (target)
7999 return target;
8000 break;
8001
8002 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8003 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8004 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8005 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8006 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 8007 {
8008 unsigned int nargs, z;
f1f41a6c 8009 vec<tree, va_gc> *vec;
2c201ad1 8010
8011 mode =
8012 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8013 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8014 if (target)
8015 return target;
8016
8017 /* If this is turned into an external library call, the weak parameter
8018 must be dropped to match the expected parameter list. */
8019 nargs = call_expr_nargs (exp);
f1f41a6c 8020 vec_alloc (vec, nargs - 1);
2c201ad1 8021 for (z = 0; z < 3; z++)
f1f41a6c 8022 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8023 /* Skip the boolean weak parameter. */
8024 for (z = 4; z < 6; z++)
f1f41a6c 8025 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 8026 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8027 break;
8028 }
1cd6e20d 8029
8030 case BUILT_IN_ATOMIC_LOAD_1:
8031 case BUILT_IN_ATOMIC_LOAD_2:
8032 case BUILT_IN_ATOMIC_LOAD_4:
8033 case BUILT_IN_ATOMIC_LOAD_8:
8034 case BUILT_IN_ATOMIC_LOAD_16:
8035 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8036 target = expand_builtin_atomic_load (mode, exp, target);
8037 if (target)
8038 return target;
8039 break;
8040
8041 case BUILT_IN_ATOMIC_STORE_1:
8042 case BUILT_IN_ATOMIC_STORE_2:
8043 case BUILT_IN_ATOMIC_STORE_4:
8044 case BUILT_IN_ATOMIC_STORE_8:
8045 case BUILT_IN_ATOMIC_STORE_16:
8046 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8047 target = expand_builtin_atomic_store (mode, exp);
8048 if (target)
8049 return const0_rtx;
8050 break;
8051
8052 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8053 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8054 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8055 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8056 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8057 {
8058 enum built_in_function lib;
8059 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8060 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8061 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8062 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8063 ignore, lib);
8064 if (target)
8065 return target;
8066 break;
8067 }
8068 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8069 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8070 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8071 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8072 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8073 {
8074 enum built_in_function lib;
8075 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8076 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8077 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8078 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8079 ignore, lib);
8080 if (target)
8081 return target;
8082 break;
8083 }
8084 case BUILT_IN_ATOMIC_AND_FETCH_1:
8085 case BUILT_IN_ATOMIC_AND_FETCH_2:
8086 case BUILT_IN_ATOMIC_AND_FETCH_4:
8087 case BUILT_IN_ATOMIC_AND_FETCH_8:
8088 case BUILT_IN_ATOMIC_AND_FETCH_16:
8089 {
8090 enum built_in_function lib;
8091 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8092 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8093 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8094 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8095 ignore, lib);
8096 if (target)
8097 return target;
8098 break;
8099 }
8100 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8101 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8102 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8103 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8104 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8105 {
8106 enum built_in_function lib;
8107 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8108 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8109 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8110 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8111 ignore, lib);
8112 if (target)
8113 return target;
8114 break;
8115 }
8116 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8117 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8118 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8119 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8120 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8121 {
8122 enum built_in_function lib;
8123 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8124 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8125 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8126 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8127 ignore, lib);
8128 if (target)
8129 return target;
8130 break;
8131 }
8132 case BUILT_IN_ATOMIC_OR_FETCH_1:
8133 case BUILT_IN_ATOMIC_OR_FETCH_2:
8134 case BUILT_IN_ATOMIC_OR_FETCH_4:
8135 case BUILT_IN_ATOMIC_OR_FETCH_8:
8136 case BUILT_IN_ATOMIC_OR_FETCH_16:
8137 {
8138 enum built_in_function lib;
8139 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8140 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8141 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8142 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8143 ignore, lib);
8144 if (target)
8145 return target;
8146 break;
8147 }
8148 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8149 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8150 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8151 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8152 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8153 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8154 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8155 ignore, BUILT_IN_NONE);
8156 if (target)
8157 return target;
8158 break;
8159
8160 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8161 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8162 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8163 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8164 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8165 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8166 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8167 ignore, BUILT_IN_NONE);
8168 if (target)
8169 return target;
8170 break;
8171
8172 case BUILT_IN_ATOMIC_FETCH_AND_1:
8173 case BUILT_IN_ATOMIC_FETCH_AND_2:
8174 case BUILT_IN_ATOMIC_FETCH_AND_4:
8175 case BUILT_IN_ATOMIC_FETCH_AND_8:
8176 case BUILT_IN_ATOMIC_FETCH_AND_16:
8177 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8178 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8179 ignore, BUILT_IN_NONE);
8180 if (target)
8181 return target;
8182 break;
8183
8184 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8185 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8186 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8187 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8188 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8189 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8190 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8191 ignore, BUILT_IN_NONE);
8192 if (target)
8193 return target;
8194 break;
8195
8196 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8197 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8198 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8199 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8200 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8201 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8202 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8203 ignore, BUILT_IN_NONE);
8204 if (target)
8205 return target;
8206 break;
8207
8208 case BUILT_IN_ATOMIC_FETCH_OR_1:
8209 case BUILT_IN_ATOMIC_FETCH_OR_2:
8210 case BUILT_IN_ATOMIC_FETCH_OR_4:
8211 case BUILT_IN_ATOMIC_FETCH_OR_8:
8212 case BUILT_IN_ATOMIC_FETCH_OR_16:
8213 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8214 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8215 ignore, BUILT_IN_NONE);
8216 if (target)
8217 return target;
8218 break;
10b744a3 8219
8220 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 8221 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 8222
8223 case BUILT_IN_ATOMIC_CLEAR:
8224 return expand_builtin_atomic_clear (exp);
1cd6e20d 8225
8226 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8227 return expand_builtin_atomic_always_lock_free (exp);
8228
8229 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8230 target = expand_builtin_atomic_is_lock_free (exp);
8231 if (target)
8232 return target;
8233 break;
8234
8235 case BUILT_IN_ATOMIC_THREAD_FENCE:
8236 expand_builtin_atomic_thread_fence (exp);
8237 return const0_rtx;
8238
8239 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8240 expand_builtin_atomic_signal_fence (exp);
8241 return const0_rtx;
8242
0a39fd54 8243 case BUILT_IN_OBJECT_SIZE:
8244 return expand_builtin_object_size (exp);
8245
8246 case BUILT_IN_MEMCPY_CHK:
8247 case BUILT_IN_MEMPCPY_CHK:
8248 case BUILT_IN_MEMMOVE_CHK:
8249 case BUILT_IN_MEMSET_CHK:
8250 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8251 if (target)
8252 return target;
8253 break;
8254
8255 case BUILT_IN_STRCPY_CHK:
8256 case BUILT_IN_STPCPY_CHK:
8257 case BUILT_IN_STRNCPY_CHK:
1063acde 8258 case BUILT_IN_STPNCPY_CHK:
0a39fd54 8259 case BUILT_IN_STRCAT_CHK:
b356dfef 8260 case BUILT_IN_STRNCAT_CHK:
0a39fd54 8261 case BUILT_IN_SNPRINTF_CHK:
8262 case BUILT_IN_VSNPRINTF_CHK:
8263 maybe_emit_chk_warning (exp, fcode);
8264 break;
8265
8266 case BUILT_IN_SPRINTF_CHK:
8267 case BUILT_IN_VSPRINTF_CHK:
8268 maybe_emit_sprintf_chk_warning (exp, fcode);
8269 break;
8270
2c281b15 8271 case BUILT_IN_FREE:
f74ea1c2 8272 if (warn_free_nonheap_object)
8273 maybe_emit_free_warning (exp);
2c281b15 8274 break;
8275
badaa04c 8276 case BUILT_IN_THREAD_POINTER:
8277 return expand_builtin_thread_pointer (exp, target);
8278
8279 case BUILT_IN_SET_THREAD_POINTER:
8280 expand_builtin_set_thread_pointer (exp);
8281 return const0_rtx;
8282
ca4c3545 8283 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 8284 /* Do library call, if we failed to expand the builtin when
8285 folding. */
ca4c3545 8286 break;
8287
a7babc1e 8288 case BUILT_IN_GOACC_PARLEVEL_ID:
8289 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8290 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8291
123081ef 8292 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8293 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8294
8295 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8296 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8297 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8298 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8299 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8300 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8301 return expand_speculation_safe_value (mode, exp, target, ignore);
8302
92482ee0 8303 default: /* just do library call, if unknown builtin */
146c1b4f 8304 break;
53800dbe 8305 }
8306
8307 /* The switch statement above can drop through to cause the function
8308 to be called normally. */
8309 return expand_call (exp, target, ignore);
8310}
650e4c94 8311
805e22b2 8312/* Determine whether a tree node represents a call to a built-in
52203a9d 8313 function. If the tree T is a call to a built-in function with
8314 the right number of arguments of the appropriate types, return
8315 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8316 Otherwise the return value is END_BUILTINS. */
aecda0d6 8317
805e22b2 8318enum built_in_function
b7bf20db 8319builtin_mathfn_code (const_tree t)
805e22b2 8320{
b7bf20db 8321 const_tree fndecl, arg, parmlist;
8322 const_tree argtype, parmtype;
8323 const_call_expr_arg_iterator iter;
805e22b2 8324
d44e3710 8325 if (TREE_CODE (t) != CALL_EXPR)
805e22b2 8326 return END_BUILTINS;
8327
c6e6ecb1 8328 fndecl = get_callee_fndecl (t);
a0e9bfbb 8329 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8330 return END_BUILTINS;
805e22b2 8331
52203a9d 8332 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 8333 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 8334 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 8335 {
52203a9d 8336 /* If a function doesn't take a variable number of arguments,
8337 the last element in the list will have type `void'. */
8338 parmtype = TREE_VALUE (parmlist);
8339 if (VOID_TYPE_P (parmtype))
8340 {
b7bf20db 8341 if (more_const_call_expr_args_p (&iter))
52203a9d 8342 return END_BUILTINS;
8343 return DECL_FUNCTION_CODE (fndecl);
8344 }
8345
b7bf20db 8346 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 8347 return END_BUILTINS;
48e1416a 8348
b7bf20db 8349 arg = next_const_call_expr_arg (&iter);
c2f47e15 8350 argtype = TREE_TYPE (arg);
52203a9d 8351
8352 if (SCALAR_FLOAT_TYPE_P (parmtype))
8353 {
8354 if (! SCALAR_FLOAT_TYPE_P (argtype))
8355 return END_BUILTINS;
8356 }
8357 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8358 {
8359 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8360 return END_BUILTINS;
8361 }
8362 else if (POINTER_TYPE_P (parmtype))
8363 {
8364 if (! POINTER_TYPE_P (argtype))
8365 return END_BUILTINS;
8366 }
8367 else if (INTEGRAL_TYPE_P (parmtype))
8368 {
8369 if (! INTEGRAL_TYPE_P (argtype))
8370 return END_BUILTINS;
8371 }
8372 else
e9f80ff5 8373 return END_BUILTINS;
e9f80ff5 8374 }
8375
52203a9d 8376 /* Variable-length argument list. */
805e22b2 8377 return DECL_FUNCTION_CODE (fndecl);
8378}
8379
c2f47e15 8380/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8381 evaluate to a constant. */
650e4c94 8382
8383static tree
c2f47e15 8384fold_builtin_constant_p (tree arg)
650e4c94 8385{
650e4c94 8386 /* We return 1 for a numeric type that's known to be a constant
8387 value at compile-time or for an aggregate type that's a
8388 literal constant. */
c2f47e15 8389 STRIP_NOPS (arg);
650e4c94 8390
8391 /* If we know this is a constant, emit the constant of one. */
c2f47e15 8392 if (CONSTANT_CLASS_P (arg)
8393 || (TREE_CODE (arg) == CONSTRUCTOR
8394 && TREE_CONSTANT (arg)))
650e4c94 8395 return integer_one_node;
c2f47e15 8396 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 8397 {
c2f47e15 8398 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 8399 if (TREE_CODE (op) == STRING_CST
8400 || (TREE_CODE (op) == ARRAY_REF
8401 && integer_zerop (TREE_OPERAND (op, 1))
8402 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8403 return integer_one_node;
8404 }
650e4c94 8405
1fb4300c 8406 /* If this expression has side effects, show we don't know it to be a
8407 constant. Likewise if it's a pointer or aggregate type since in
8408 those case we only want literals, since those are only optimized
f97c71a1 8409 when generating RTL, not later.
8410 And finally, if we are compiling an initializer, not code, we
8411 need to return a definite result now; there's not going to be any
8412 more optimization done. */
c2f47e15 8413 if (TREE_SIDE_EFFECTS (arg)
8414 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8415 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 8416 || cfun == 0
0b049e15 8417 || folding_initializer
8418 || force_folding_builtin_constant_p)
650e4c94 8419 return integer_zero_node;
8420
c2f47e15 8421 return NULL_TREE;
650e4c94 8422}
8423
01107f42 8424/* Create builtin_expect or builtin_expect_with_probability
8425 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8426 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8427 builtin_expect_with_probability instead uses third argument as PROBABILITY
8428 value. */
4ee9c684 8429
8430static tree
c83059be 8431build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
01107f42 8432 tree predictor, tree probability)
4ee9c684 8433{
76f5a783 8434 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 8435
01107f42 8436 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8437 : BUILT_IN_EXPECT_WITH_PROBABILITY);
76f5a783 8438 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8439 ret_type = TREE_TYPE (TREE_TYPE (fn));
8440 pred_type = TREE_VALUE (arg_types);
8441 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8442
389dd41b 8443 pred = fold_convert_loc (loc, pred_type, pred);
8444 expected = fold_convert_loc (loc, expected_type, expected);
01107f42 8445
8446 if (probability)
8447 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8448 else
8449 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8450 predictor);
76f5a783 8451
8452 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8453 build_int_cst (ret_type, 0));
8454}
8455
01107f42 8456/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
76f5a783 8457 NULL_TREE if no simplification is possible. */
8458
c83059be 8459tree
01107f42 8460fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8461 tree arg3)
76f5a783 8462{
083bada9 8463 tree inner, fndecl, inner_arg0;
76f5a783 8464 enum tree_code code;
8465
083bada9 8466 /* Distribute the expected value over short-circuiting operators.
8467 See through the cast from truthvalue_type_node to long. */
8468 inner_arg0 = arg0;
d09ef31a 8469 while (CONVERT_EXPR_P (inner_arg0)
083bada9 8470 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8471 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8472 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8473
76f5a783 8474 /* If this is a builtin_expect within a builtin_expect keep the
8475 inner one. See through a comparison against a constant. It
8476 might have been added to create a thruthvalue. */
083bada9 8477 inner = inner_arg0;
8478
76f5a783 8479 if (COMPARISON_CLASS_P (inner)
8480 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8481 inner = TREE_OPERAND (inner, 0);
8482
8483 if (TREE_CODE (inner) == CALL_EXPR
8484 && (fndecl = get_callee_fndecl (inner))
a0e9bfbb 8485 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8486 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
76f5a783 8487 return arg0;
8488
083bada9 8489 inner = inner_arg0;
76f5a783 8490 code = TREE_CODE (inner);
8491 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8492 {
8493 tree op0 = TREE_OPERAND (inner, 0);
8494 tree op1 = TREE_OPERAND (inner, 1);
2f2a7720 8495 arg1 = save_expr (arg1);
76f5a783 8496
01107f42 8497 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8498 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
76f5a783 8499 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8500
389dd41b 8501 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 8502 }
8503
8504 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 8505 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 8506 return NULL_TREE;
4ee9c684 8507
76f5a783 8508 /* If we expect that a comparison against the argument will fold to
8509 a constant return the constant. In practice, this means a true
8510 constant or the address of a non-weak symbol. */
083bada9 8511 inner = inner_arg0;
4ee9c684 8512 STRIP_NOPS (inner);
8513 if (TREE_CODE (inner) == ADDR_EXPR)
8514 {
8515 do
8516 {
8517 inner = TREE_OPERAND (inner, 0);
8518 }
8519 while (TREE_CODE (inner) == COMPONENT_REF
8520 || TREE_CODE (inner) == ARRAY_REF);
53e9c5c4 8521 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
c2f47e15 8522 return NULL_TREE;
4ee9c684 8523 }
8524
76f5a783 8525 /* Otherwise, ARG0 already has the proper type for the return value. */
8526 return arg0;
4ee9c684 8527}
8528
c2f47e15 8529/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 8530
539a3a92 8531static tree
c2f47e15 8532fold_builtin_classify_type (tree arg)
539a3a92 8533{
c2f47e15 8534 if (arg == 0)
7002a1c8 8535 return build_int_cst (integer_type_node, no_type_class);
539a3a92 8536
7002a1c8 8537 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 8538}
8539
c2f47e15 8540/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 8541
8542static tree
c7cbde74 8543fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 8544{
c2f47e15 8545 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 8546 return NULL_TREE;
8547 else
8548 {
2b84b289 8549 c_strlen_data data;
8550 memset (&data, 0, sizeof (c_strlen_data));
8551 tree len = c_strlen (arg, 0, &data);
e6e27594 8552
8553 if (len)
c7cbde74 8554 return fold_convert_loc (loc, type, len);
e6e27594 8555
2b84b289 8556 if (!data.decl)
8557 c_strlen (arg, 1, &data);
7af57b1c 8558
2b84b289 8559 if (data.decl)
7af57b1c 8560 {
8561 if (EXPR_HAS_LOCATION (arg))
8562 loc = EXPR_LOCATION (arg);
8563 else if (loc == UNKNOWN_LOCATION)
8564 loc = input_location;
2b84b289 8565 warn_string_no_nul (loc, "strlen", arg, data.decl);
7af57b1c 8566 }
8567
e6e27594 8568 return NULL_TREE;
8569 }
8570}
8571
92c43e3c 8572/* Fold a call to __builtin_inf or __builtin_huge_val. */
8573
8574static tree
389dd41b 8575fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 8576{
aa870c1b 8577 REAL_VALUE_TYPE real;
8578
40f4dbd5 8579 /* __builtin_inff is intended to be usable to define INFINITY on all
8580 targets. If an infinity is not available, INFINITY expands "to a
8581 positive constant of type float that overflows at translation
8582 time", footnote "In this case, using INFINITY will violate the
8583 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8584 Thus we pedwarn to ensure this constraint violation is
8585 diagnosed. */
92c43e3c 8586 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 8587 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 8588
aa870c1b 8589 real_inf (&real);
8590 return build_real (type, real);
92c43e3c 8591}
8592
d735c391 8593/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8594 NULL_TREE if no simplification can be made. */
8595
8596static tree
389dd41b 8597fold_builtin_sincos (location_t loc,
8598 tree arg0, tree arg1, tree arg2)
d735c391 8599{
c2f47e15 8600 tree type;
6c21be92 8601 tree fndecl, call = NULL_TREE;
d735c391 8602
c2f47e15 8603 if (!validate_arg (arg0, REAL_TYPE)
8604 || !validate_arg (arg1, POINTER_TYPE)
8605 || !validate_arg (arg2, POINTER_TYPE))
d735c391 8606 return NULL_TREE;
8607
d735c391 8608 type = TREE_TYPE (arg0);
d735c391 8609
8610 /* Calculate the result when the argument is a constant. */
e3240774 8611 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
6c21be92 8612 if (fn == END_BUILTINS)
d735c391 8613 return NULL_TREE;
8614
6c21be92 8615 /* Canonicalize sincos to cexpi. */
8616 if (TREE_CODE (arg0) == REAL_CST)
8617 {
8618 tree complex_type = build_complex_type (type);
744fe358 8619 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
6c21be92 8620 }
8621 if (!call)
8622 {
8623 if (!targetm.libc_has_function (function_c99_math_complex)
8624 || !builtin_decl_implicit_p (fn))
8625 return NULL_TREE;
8626 fndecl = builtin_decl_explicit (fn);
8627 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8628 call = builtin_save_expr (call);
8629 }
d735c391 8630
8234e9d3 8631 tree ptype = build_pointer_type (type);
8632 arg1 = fold_convert (ptype, arg1);
8633 arg2 = fold_convert (ptype, arg2);
a75b1c71 8634 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 8635 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8636 build_fold_indirect_ref_loc (loc, arg1),
6c21be92 8637 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
d735c391 8638 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8639 build_fold_indirect_ref_loc (loc, arg2),
6c21be92 8640 fold_build1_loc (loc, REALPART_EXPR, type, call)));
d735c391 8641}
8642
c2f47e15 8643/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8644 Return NULL_TREE if no simplification can be made. */
9c8a1629 8645
8646static tree
389dd41b 8647fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8648{
c2f47e15 8649 if (!validate_arg (arg1, POINTER_TYPE)
8650 || !validate_arg (arg2, POINTER_TYPE)
8651 || !validate_arg (len, INTEGER_TYPE))
8652 return NULL_TREE;
9c8a1629 8653
8654 /* If the LEN parameter is zero, return zero. */
8655 if (integer_zerop (len))
389dd41b 8656 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8657 arg1, arg2);
9c8a1629 8658
8659 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8660 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8661 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8662
c4fef134 8663 /* If len parameter is one, return an expression corresponding to
8664 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
e913b5cd 8665 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8666 {
8667 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8668 tree cst_uchar_ptr_node
8669 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8670
389dd41b 8671 tree ind1
8672 = fold_convert_loc (loc, integer_type_node,
8673 build1 (INDIRECT_REF, cst_uchar_node,
8674 fold_convert_loc (loc,
8675 cst_uchar_ptr_node,
c4fef134 8676 arg1)));
389dd41b 8677 tree ind2
8678 = fold_convert_loc (loc, integer_type_node,
8679 build1 (INDIRECT_REF, cst_uchar_node,
8680 fold_convert_loc (loc,
8681 cst_uchar_ptr_node,
c4fef134 8682 arg2)));
389dd41b 8683 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8684 }
9c8a1629 8685
c2f47e15 8686 return NULL_TREE;
9c8a1629 8687}
8688
c2f47e15 8689/* Fold a call to builtin isascii with argument ARG. */
d49367d4 8690
8691static tree
389dd41b 8692fold_builtin_isascii (location_t loc, tree arg)
d49367d4 8693{
c2f47e15 8694 if (!validate_arg (arg, INTEGER_TYPE))
8695 return NULL_TREE;
d49367d4 8696 else
8697 {
8698 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8699 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8700 build_int_cst (integer_type_node,
c90b5d40 8701 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8702 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8703 arg, integer_zero_node);
d49367d4 8704 }
8705}
8706
c2f47e15 8707/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8708
8709static tree
389dd41b 8710fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8711{
c2f47e15 8712 if (!validate_arg (arg, INTEGER_TYPE))
8713 return NULL_TREE;
48e1416a 8714
c2f47e15 8715 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8716 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8717 build_int_cst (integer_type_node, 0x7f));
d49367d4 8718}
8719
c2f47e15 8720/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8721
8722static tree
389dd41b 8723fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8724{
c2f47e15 8725 if (!validate_arg (arg, INTEGER_TYPE))
8726 return NULL_TREE;
df1cf42e 8727 else
8728 {
8729 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8730 /* According to the C standard, isdigit is unaffected by locale.
8731 However, it definitely is affected by the target character set. */
624d37a6 8732 unsigned HOST_WIDE_INT target_digit0
8733 = lang_hooks.to_target_charset ('0');
8734
8735 if (target_digit0 == 0)
8736 return NULL_TREE;
8737
389dd41b 8738 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8739 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8740 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8741 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8742 build_int_cst (unsigned_type_node, 9));
df1cf42e 8743 }
8744}
27f261ef 8745
c2f47e15 8746/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8747
8748static tree
389dd41b 8749fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8750{
c2f47e15 8751 if (!validate_arg (arg, REAL_TYPE))
8752 return NULL_TREE;
d1aade50 8753
389dd41b 8754 arg = fold_convert_loc (loc, type, arg);
389dd41b 8755 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8756}
8757
c2f47e15 8758/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8759
8760static tree
389dd41b 8761fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8762{
c2f47e15 8763 if (!validate_arg (arg, INTEGER_TYPE))
8764 return NULL_TREE;
d1aade50 8765
389dd41b 8766 arg = fold_convert_loc (loc, type, arg);
389dd41b 8767 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8768}
8769
abe4dcf6 8770/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8771
8772static tree
389dd41b 8773fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8774{
239d491a 8775 if (validate_arg (arg, COMPLEX_TYPE)
8776 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8777 {
8778 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8779
abe4dcf6 8780 if (atan2_fn)
8781 {
c2f47e15 8782 tree new_arg = builtin_save_expr (arg);
389dd41b 8783 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8784 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8785 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8786 }
8787 }
48e1416a 8788
abe4dcf6 8789 return NULL_TREE;
8790}
8791
/* Fold a call to builtin frexp, we can assume the base is 2.
   ARG0 is the value to decompose, ARG1 the int* receiving the
   exponent, and RETTYPE the return type of the builtin.  The call
   is only folded when ARG0 is a constant REAL_CST; otherwise
   NULL_TREE is returned and the call is left alone.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold constant, non-overflowing arguments.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  The explicit
	 TREE_SIDE_EFFECTS marking keeps the store from being dropped.  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8847
/* Fold a call to builtin modf.  ARG0 is the value to split, ARG1 the
   pointer receiving the integral part, and RETTYPE the return type of
   the builtin.  Only folded when ARG0 is a constant REAL_CST; returns
   NULL_TREE otherwise.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold constant, non-overflowing arguments.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  Mark the
	 store explicitly side-effecting so it is not dropped.  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
8903
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has a direct instruction for this classification,
     prefer expanding to RTL over the generic expansions below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) values encode NaN/Inf only in the
     high-order double; the branches below test that part alone.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is evaluated once via builtin_save_expr; ORIG_ARG keeps
	   the full-width value for the double-double low-part test.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* Bounds come from the textual float format: max value of the
	   (possibly narrowed) MODE, min from the original mode's emin.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9062
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification is being folded (it may differ from FNDECL's code,
   e.g. the finite/isfinite family all map to BUILT_IN_ISFINITE).
   Returns NULL_TREE when no simplification applies.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode cannot represent infinities, the answer is 0.  */
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both helper results to 0/1 before combining.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs or infinities every value is finite.  */
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs the answer is always 0.  */
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) is equivalent to x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9141
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* From here on ARG is fabs of the input, evaluated exactly once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* The conditional chain is built innermost-first; RES accumulates
     the nested COND_EXPRs.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
		     build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
		     tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normal value for MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
		     arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			 fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects RES for non-NaN, FP_NAN otherwise.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9213
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick a common comparison type: the wider real type when both are
     real, otherwise whichever operand is real.  NOTE(review): if
     neither operand is a REAL_TYPE, cmp_type stays NULL_TREE here —
     presumably the front ends guarantee at least one real operand for
     these type-generic builtins; verify against callers.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(x,y): false when NaNs cannot occur.  */
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The builtins compute the negation of the requested relation, so
     wrap the chosen comparison in TRUTH_NOT_EXPR.  */
  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
		      fold_build2_loc (loc, code, type, arg0, arg1));
}
9262
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Each _OVERFLOW_P case falls through to set the same internal fn
     as the corresponding _OVERFLOW case; the generic cases also
     record the tree opcode for constant folding below.  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  /* The internal function returns a complex value: the real part is
     the arithmetic result, the imaginary part the overflow flag.  */
  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Non-_p variants also store the result through ARG2.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
9362
c388a0cf 9363/* Fold a call to __builtin_FILE to a constant string. */
9364
9365static inline tree
9366fold_builtin_FILE (location_t loc)
9367{
9368 if (const char *fname = LOCATION_FILE (loc))
859b51f8 9369 {
9370 /* The documentation says this builtin is equivalent to the preprocessor
9371 __FILE__ macro so it appears appropriate to use the same file prefix
9372 mappings. */
9373 fname = remap_macro_filename (fname);
c388a0cf 9374 return build_string_literal (strlen (fname) + 1, fname);
859b51f8 9375 }
c388a0cf 9376
9377 return build_string_literal (1, "");
9378}
9379
9380/* Fold a call to __builtin_FUNCTION to a constant string. */
9381
9382static inline tree
9383fold_builtin_FUNCTION ()
9384{
c2d38635 9385 const char *name = "";
9386
c388a0cf 9387 if (current_function_decl)
c2d38635 9388 name = lang_hooks.decl_printable_name (current_function_decl, 0);
c388a0cf 9389
c2d38635 9390 return build_string_literal (strlen (name) + 1, name);
c388a0cf 9391}
9392
9393/* Fold a call to __builtin_LINE to an integer constant. */
9394
9395static inline tree
9396fold_builtin_LINE (location_t loc, tree type)
9397{
9398 return build_int_cst (type, LOCATION_LINE (loc));
9399}
9400
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* inf/huge_val differ only in the WARN flag passed to
       fold_builtin_inf: __builtin_inf warns when the target has no
       infinity representation, __builtin_huge_val does not.  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument: classify the "no argument" case.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
7c2f0500 9439
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try full constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* conj/creal/cimag fold directly to the corresponding tree code
       when the argument is a complex value with a real element type.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* The classification builtins try the mode-based shortcut first,
       then fall back to the generic interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free(NULL) is a no-op; fold it away entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
728bac60 9566
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't attempt any folding on erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant when both arguments are constant.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  /* Otherwise dispatch to the per-builtin simplification helpers.  */
  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The unordered comparison builtins fold to the corresponding
       unordered tree comparison (or its ordered counterpart when the
       operands cannot be NaN).  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9648
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't attempt any folding on erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant when all arguments are constant.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  /* Otherwise dispatch to the per-builtin simplification helpers.  */
  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All the arithmetic-overflow builtins share one folder, which
       discriminates on FCODE.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
650e4c94 9722
c2f47e15 9723/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9724 arguments. IGNORE is true if the result of the
9725 function call is ignored. This function returns NULL_TREE if no
9726 simplification was possible. */
48e1416a 9727
2165588a 9728tree
e80cc485 9729fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9730{
9731 tree ret = NULL_TREE;
a7f5bb2d 9732
c2f47e15 9733 switch (nargs)
9734 {
9735 case 0:
e80cc485 9736 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9737 break;
9738 case 1:
e80cc485 9739 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9740 break;
9741 case 2:
e80cc485 9742 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9743 break;
9744 case 3:
e80cc485 9745 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9746 break;
c2f47e15 9747 default:
12f08300 9748 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9749 break;
9750 }
9751 if (ret)
9752 {
75a70cf9 9753 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9754 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9755 return ret;
9756 }
9757 return NULL_TREE;
9758}
9759
0e80b01d 9760/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9761 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9762 of arguments in ARGS to be omitted. OLDNARGS is the number of
9763 elements in ARGS. */
c2f47e15 9764
9765static tree
0e80b01d 9766rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9767 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9768{
0e80b01d 9769 int nargs = oldnargs - skip + n;
9770 tree *buffer;
c2f47e15 9771
0e80b01d 9772 if (n > 0)
c2f47e15 9773 {
0e80b01d 9774 int i, j;
c2f47e15 9775
0e80b01d 9776 buffer = XALLOCAVEC (tree, nargs);
9777 for (i = 0; i < n; i++)
9778 buffer[i] = va_arg (newargs, tree);
9779 for (j = skip; j < oldnargs; j++, i++)
9780 buffer[i] = args[j];
9781 }
9782 else
9783 buffer = args + skip;
19fbe3a4 9784
0e80b01d 9785 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9786}
c2f47e15 9787
198622c0 9788/* Return true if FNDECL shouldn't be folded right now.
9789 If a built-in function has an inline attribute always_inline
9790 wrapper, defer folding it after always_inline functions have
9791 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9792 might not be performed. */
9793
51d2c51e 9794bool
198622c0 9795avoid_folding_inline_builtin (tree fndecl)
9796{
9797 return (DECL_DECLARED_INLINE_P (fndecl)
9798 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9799 && cfun
9800 && !cfun->always_inline_functions_inlined
9801 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9802}
9803
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  LOC is the call's
   location, EXP the CALL_EXPR and IGNORE true if its value is unused.
   Returns the folded replacement tree or NULL_TREE if EXP must not
   (yet) be folded.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* Defer folding of always_inline FORTIFY wrappers (see above).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-specific builtins are folded by the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
48e1416a 9847
9d884767 9848/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9849 N arguments are passed in the array ARGARRAY. Return a folded
9850 expression or NULL_TREE if no simplification was possible. */
805e22b2 9851
9852tree
9d884767 9853fold_builtin_call_array (location_t loc, tree,
d01f58f9 9854 tree fn,
9855 int n,
9856 tree *argarray)
7e15618b 9857{
9d884767 9858 if (TREE_CODE (fn) != ADDR_EXPR)
9859 return NULL_TREE;
c2f47e15 9860
9d884767 9861 tree fndecl = TREE_OPERAND (fn, 0);
9862 if (TREE_CODE (fndecl) == FUNCTION_DECL
a0e9bfbb 9863 && fndecl_built_in_p (fndecl))
9d884767 9864 {
9865 /* If last argument is __builtin_va_arg_pack (), arguments to this
9866 function are not finalized yet. Defer folding until they are. */
9867 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9868 {
9869 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
a0e9bfbb 9870 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9d884767 9871 return NULL_TREE;
9872 }
9873 if (avoid_folding_inline_builtin (fndecl))
9874 return NULL_TREE;
9875 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9876 return targetm.fold_builtin (fndecl, n, argarray, false);
9877 else
9878 return fold_builtin_n (loc, fndecl, argarray, n, false);
9879 }
c2f47e15 9880
9d884767 9881 return NULL_TREE;
c2f47e15 9882}
9883
af1409ad 9884/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9885 along with N new arguments specified as the "..." parameters. SKIP
9886 is the number of arguments in EXP to be omitted. This function is used
9887 to do varargs-to-varargs transformations. */
9888
9889static tree
9890rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9891{
9892 va_list ap;
9893 tree t;
9894
9895 va_start (ap, n);
9896 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9897 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9898 va_end (ap);
c2f47e15 9899
af1409ad 9900 return t;
c2f47e15 9901}
9902
9903/* Validate a single argument ARG against a tree code CODE representing
184fac50 9904 a type. Return true when argument is valid. */
48e1416a 9905
c2f47e15 9906static bool
184fac50 9907validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9908{
9909 if (!arg)
9910 return false;
9911 else if (code == POINTER_TYPE)
184fac50 9912 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9913 else if (code == INTEGER_TYPE)
9914 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9915 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9916}
0eb671f7 9917
75a70cf9 9918/* This function validates the types of a function call argument list
9919 against a specified list of tree_codes. If the last specifier is a 0,
9920 that represents an ellipses, otherwise the last specifier must be a
9921 VOID_TYPE.
9922
9923 This is the GIMPLE version of validate_arglist. Eventually we want to
9924 completely convert builtins.c to work from GIMPLEs and the tree based
9925 validate_arglist will then be removed. */
9926
9927bool
1a91d914 9928validate_gimple_arglist (const gcall *call, ...)
75a70cf9 9929{
9930 enum tree_code code;
9931 bool res = 0;
9932 va_list ap;
9933 const_tree arg;
9934 size_t i;
9935
9936 va_start (ap, call);
9937 i = 0;
9938
9939 do
9940 {
d62e827b 9941 code = (enum tree_code) va_arg (ap, int);
75a70cf9 9942 switch (code)
9943 {
9944 case 0:
9945 /* This signifies an ellipses, any further arguments are all ok. */
9946 res = true;
9947 goto end;
9948 case VOID_TYPE:
9949 /* This signifies an endlink, if no arguments remain, return
9950 true, otherwise return false. */
9951 res = (i == gimple_call_num_args (call));
9952 goto end;
9953 default:
9954 /* If no parameters remain or the parameter's code does not
9955 match the specified code, return false. Otherwise continue
9956 checking any remaining arguments. */
9957 arg = gimple_call_arg (call, i++);
9958 if (!validate_arg (arg, code))
9959 goto end;
9960 break;
9961 }
9962 }
9963 while (1);
9964
9965 /* We need gotos here since we can only have one VA_CLOSE in a
9966 function. */
9967 end: ;
9968 va_end (ap);
9969
9970 return res;
9971}
9972
fc2a2dcb 9973/* Default target-specific builtin expander that does nothing. */
9974
9975rtx
aecda0d6 9976default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9977 rtx target ATTRIBUTE_UNUSED,
9978 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 9979 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 9980 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 9981{
9982 return NULL_RTX;
9983}
c7926a82 9984
01537105 9985/* Returns true is EXP represents data that would potentially reside
9986 in a readonly section. */
9987
b9ea678c 9988bool
01537105 9989readonly_data_expr (tree exp)
9990{
9991 STRIP_NOPS (exp);
9992
9ff0637e 9993 if (TREE_CODE (exp) != ADDR_EXPR)
9994 return false;
9995
9996 exp = get_base_address (TREE_OPERAND (exp, 0));
9997 if (!exp)
9998 return false;
9999
10000 /* Make sure we call decl_readonly_section only for trees it
10001 can handle (since it returns true for everything it doesn't
10002 understand). */
491e04ef 10003 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10004 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 10005 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 10006 return decl_readonly_section (exp, 0);
01537105 10007 else
10008 return false;
10009}
4ee9c684 10010
c2f47e15 10011/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10012 to the call, and TYPE is its return type.
4ee9c684 10013
c2f47e15 10014 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10015 simplified form of the call as a tree.
10016
10017 The simplified form may be a constant or other expression which
10018 computes the same value, but in a more efficient manner (including
10019 calls to other builtin functions).
10020
10021 The call may contain arguments which need to be evaluated, but
10022 which are not useful to determine the result of the call. In
10023 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10024 COMPOUND_EXPR will be an argument which must be evaluated.
10025 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10026 COMPOUND_EXPR in the chain will contain the tree for the simplified
10027 form of the builtin function call. */
10028
10029static tree
389dd41b 10030fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10031{
c2f47e15 10032 if (!validate_arg (s1, POINTER_TYPE)
10033 || !validate_arg (s2, POINTER_TYPE))
10034 return NULL_TREE;
4ee9c684 10035 else
10036 {
4ee9c684 10037 tree fn;
10038 const char *p1, *p2;
10039
10040 p2 = c_getstr (s2);
10041 if (p2 == NULL)
c2f47e15 10042 return NULL_TREE;
4ee9c684 10043
10044 p1 = c_getstr (s1);
10045 if (p1 != NULL)
10046 {
10047 const char *r = strpbrk (p1, p2);
daa1d5f5 10048 tree tem;
4ee9c684 10049
10050 if (r == NULL)
779b4c41 10051 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10052
10053 /* Return an offset into the constant string argument. */
2cc66f2a 10054 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10055 return fold_convert_loc (loc, type, tem);
4ee9c684 10056 }
10057
10058 if (p2[0] == '\0')
05abc81b 10059 /* strpbrk(x, "") == NULL.
10060 Evaluate and ignore s1 in case it had side-effects. */
44bfe16d 10061 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
4ee9c684 10062
10063 if (p2[1] != '\0')
c2f47e15 10064 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 10065
b9a16870 10066 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10067 if (!fn)
c2f47e15 10068 return NULL_TREE;
4ee9c684 10069
10070 /* New argument list transforming strpbrk(s1, s2) to
10071 strchr(s1, s2[0]). */
7002a1c8 10072 return build_call_expr_loc (loc, fn, 2, s1,
10073 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10074 }
10075}
10076
c2f47e15 10077/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10078 to the call.
4ee9c684 10079
c2f47e15 10080 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10081 simplified form of the call as a tree.
10082
10083 The simplified form may be a constant or other expression which
10084 computes the same value, but in a more efficient manner (including
10085 calls to other builtin functions).
10086
10087 The call may contain arguments which need to be evaluated, but
10088 which are not useful to determine the result of the call. In
10089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10090 COMPOUND_EXPR will be an argument which must be evaluated.
10091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10092 COMPOUND_EXPR in the chain will contain the tree for the simplified
10093 form of the builtin function call. */
10094
10095static tree
389dd41b 10096fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 10097{
c2f47e15 10098 if (!validate_arg (s1, POINTER_TYPE)
10099 || !validate_arg (s2, POINTER_TYPE))
10100 return NULL_TREE;
4ee9c684 10101 else
10102 {
4ee9c684 10103 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10104
c2f47e15 10105 /* If either argument is "", return NULL_TREE. */
4ee9c684 10106 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 10107 /* Evaluate and ignore both arguments in case either one has
10108 side-effects. */
389dd41b 10109 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 10110 s1, s2);
c2f47e15 10111 return NULL_TREE;
4ee9c684 10112 }
10113}
10114
c2f47e15 10115/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10116 to the call.
4ee9c684 10117
c2f47e15 10118 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10119 simplified form of the call as a tree.
10120
10121 The simplified form may be a constant or other expression which
10122 computes the same value, but in a more efficient manner (including
10123 calls to other builtin functions).
10124
10125 The call may contain arguments which need to be evaluated, but
10126 which are not useful to determine the result of the call. In
10127 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10128 COMPOUND_EXPR will be an argument which must be evaluated.
10129 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10130 COMPOUND_EXPR in the chain will contain the tree for the simplified
10131 form of the builtin function call. */
10132
10133static tree
389dd41b 10134fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 10135{
c2f47e15 10136 if (!validate_arg (s1, POINTER_TYPE)
10137 || !validate_arg (s2, POINTER_TYPE))
10138 return NULL_TREE;
4ee9c684 10139 else
10140 {
c2f47e15 10141 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 10142 const char *p1 = c_getstr (s1);
4ee9c684 10143 if (p1 && *p1 == '\0')
10144 {
10145 /* Evaluate and ignore argument s2 in case it has
10146 side-effects. */
389dd41b 10147 return omit_one_operand_loc (loc, size_type_node,
39761420 10148 size_zero_node, s2);
4ee9c684 10149 }
10150
10151 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 10152 const char *p2 = c_getstr (s2);
4ee9c684 10153 if (p2 && *p2 == '\0')
10154 {
b9a16870 10155 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 10156
10157 /* If the replacement _DECL isn't initialized, don't do the
10158 transformation. */
10159 if (!fn)
c2f47e15 10160 return NULL_TREE;
4ee9c684 10161
389dd41b 10162 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 10163 }
c2f47e15 10164 return NULL_TREE;
4ee9c684 10165 }
10166}
10167
c2f47e15 10168/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 10169 produced. False otherwise. This is done so that we don't output the error
10170 or warning twice or three times. */
75a70cf9 10171
743b0c6a 10172bool
c2f47e15 10173fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 10174{
10175 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 10176 int nargs = call_expr_nargs (exp);
10177 tree arg;
d98fd4a4 10178 /* There is good chance the current input_location points inside the
10179 definition of the va_start macro (perhaps on the token for
10180 builtin) in a system header, so warnings will not be emitted.
10181 Use the location in real source code. */
10182 source_location current_location =
10183 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10184 NULL);
4ee9c684 10185
257d99c3 10186 if (!stdarg_p (fntype))
743b0c6a 10187 {
10188 error ("%<va_start%> used in function with fixed args");
10189 return true;
10190 }
c2f47e15 10191
10192 if (va_start_p)
79012a9d 10193 {
c2f47e15 10194 if (va_start_p && (nargs != 2))
10195 {
10196 error ("wrong number of arguments to function %<va_start%>");
10197 return true;
10198 }
10199 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 10200 }
10201 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10202 when we checked the arguments and if needed issued a warning. */
c2f47e15 10203 else
4ee9c684 10204 {
c2f47e15 10205 if (nargs == 0)
10206 {
10207 /* Evidently an out of date version of <stdarg.h>; can't validate
10208 va_start's second argument, but can still work as intended. */
d98fd4a4 10209 warning_at (current_location,
7edb1062 10210 OPT_Wvarargs,
10211 "%<__builtin_next_arg%> called without an argument");
c2f47e15 10212 return true;
10213 }
10214 else if (nargs > 1)
a0c938f0 10215 {
c2f47e15 10216 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 10217 return true;
10218 }
c2f47e15 10219 arg = CALL_EXPR_ARG (exp, 0);
10220 }
10221
a8dd994c 10222 if (TREE_CODE (arg) == SSA_NAME)
10223 arg = SSA_NAME_VAR (arg);
10224
c2f47e15 10225 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 10226 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 10227 the arguments and if needed issuing a warning. */
10228 if (!integer_zerop (arg))
10229 {
10230 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 10231
4ee9c684 10232 /* Strip off all nops for the sake of the comparison. This
10233 is not quite the same as STRIP_NOPS. It does more.
10234 We must also strip off INDIRECT_EXPR for C++ reference
10235 parameters. */
72dd6141 10236 while (CONVERT_EXPR_P (arg)
4ee9c684 10237 || TREE_CODE (arg) == INDIRECT_REF)
10238 arg = TREE_OPERAND (arg, 0);
10239 if (arg != last_parm)
a0c938f0 10240 {
b08cf617 10241 /* FIXME: Sometimes with the tree optimizers we can get the
10242 not the last argument even though the user used the last
10243 argument. We just warn and set the arg to be the last
10244 argument so that we will get wrong-code because of
10245 it. */
d98fd4a4 10246 warning_at (current_location,
7edb1062 10247 OPT_Wvarargs,
d98fd4a4 10248 "second parameter of %<va_start%> not last named argument");
743b0c6a 10249 }
24158ad7 10250
10251 /* Undefined by C99 7.15.1.4p4 (va_start):
10252 "If the parameter parmN is declared with the register storage
10253 class, with a function or array type, or with a type that is
10254 not compatible with the type that results after application of
10255 the default argument promotions, the behavior is undefined."
10256 */
10257 else if (DECL_REGISTER (arg))
d98fd4a4 10258 {
10259 warning_at (current_location,
7edb1062 10260 OPT_Wvarargs,
67cf9b55 10261 "undefined behavior when second parameter of "
d98fd4a4 10262 "%<va_start%> is declared with %<register%> storage");
10263 }
24158ad7 10264
79012a9d 10265 /* We want to verify the second parameter just once before the tree
a0c938f0 10266 optimizers are run and then avoid keeping it in the tree,
10267 as otherwise we could warn even for correct code like:
10268 void foo (int i, ...)
10269 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 10270 if (va_start_p)
10271 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10272 else
10273 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 10274 }
10275 return false;
4ee9c684 10276}
10277
10278
c2f47e15 10279/* Expand a call EXP to __builtin_object_size. */
0a39fd54 10280
f7715905 10281static rtx
0a39fd54 10282expand_builtin_object_size (tree exp)
10283{
10284 tree ost;
10285 int object_size_type;
10286 tree fndecl = get_callee_fndecl (exp);
0a39fd54 10287
c2f47e15 10288 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 10289 {
8c41abe8 10290 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
b8c23db3 10291 exp, fndecl);
0a39fd54 10292 expand_builtin_trap ();
10293 return const0_rtx;
10294 }
10295
c2f47e15 10296 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 10297 STRIP_NOPS (ost);
10298
10299 if (TREE_CODE (ost) != INTEGER_CST
10300 || tree_int_cst_sgn (ost) < 0
10301 || compare_tree_int (ost, 3) > 0)
10302 {
8c41abe8 10303 error ("%Klast argument of %qD is not integer constant between 0 and 3",
b8c23db3 10304 exp, fndecl);
0a39fd54 10305 expand_builtin_trap ();
10306 return const0_rtx;
10307 }
10308
e913b5cd 10309 object_size_type = tree_to_shwi (ost);
0a39fd54 10310
10311 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10312}
10313
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose overlong accesses; SIZES_OK is false when an overflow
     was detected.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Emit the unchecked variant, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk still has to produce DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10430
10431/* Emit warning if a buffer overflow is detected at compile time. */
10432
10433static void
10434maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10435{
5aef8938 10436 /* The source string. */
10437 tree srcstr = NULL_TREE;
10438 /* The size of the destination object. */
10439 tree objsize = NULL_TREE;
10440 /* The string that is being concatenated with (as in __strcat_chk)
10441 or null if it isn't. */
10442 tree catstr = NULL_TREE;
10443 /* The maximum length of the source sequence in a bounded operation
10444 (such as __strncat_chk) or null if the operation isn't bounded
10445 (such as __strcat_chk). */
e6a18b5a 10446 tree maxread = NULL_TREE;
f3969b49 10447 /* The exact size of the access (such as in __strncpy_chk). */
10448 tree size = NULL_TREE;
0a39fd54 10449
10450 switch (fcode)
10451 {
10452 case BUILT_IN_STRCPY_CHK:
10453 case BUILT_IN_STPCPY_CHK:
5aef8938 10454 srcstr = CALL_EXPR_ARG (exp, 1);
10455 objsize = CALL_EXPR_ARG (exp, 2);
10456 break;
10457
0a39fd54 10458 case BUILT_IN_STRCAT_CHK:
5aef8938 10459 /* For __strcat_chk the warning will be emitted only if overflowing
10460 by at least strlen (dest) + 1 bytes. */
10461 catstr = CALL_EXPR_ARG (exp, 0);
10462 srcstr = CALL_EXPR_ARG (exp, 1);
10463 objsize = CALL_EXPR_ARG (exp, 2);
0a39fd54 10464 break;
5aef8938 10465
b356dfef 10466 case BUILT_IN_STRNCAT_CHK:
5aef8938 10467 catstr = CALL_EXPR_ARG (exp, 0);
10468 srcstr = CALL_EXPR_ARG (exp, 1);
e6a18b5a 10469 maxread = CALL_EXPR_ARG (exp, 2);
5aef8938 10470 objsize = CALL_EXPR_ARG (exp, 3);
10471 break;
10472
0a39fd54 10473 case BUILT_IN_STRNCPY_CHK:
1063acde 10474 case BUILT_IN_STPNCPY_CHK:
5aef8938 10475 srcstr = CALL_EXPR_ARG (exp, 1);
f3969b49 10476 size = CALL_EXPR_ARG (exp, 2);
5aef8938 10477 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 10478 break;
5aef8938 10479
0a39fd54 10480 case BUILT_IN_SNPRINTF_CHK:
10481 case BUILT_IN_VSNPRINTF_CHK:
e6a18b5a 10482 maxread = CALL_EXPR_ARG (exp, 1);
5aef8938 10483 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 10484 break;
10485 default:
10486 gcc_unreachable ();
10487 }
10488
e6a18b5a 10489 if (catstr && maxread)
0a39fd54 10490 {
5aef8938 10491 /* Check __strncat_chk. There is no way to determine the length
10492 of the string to which the source string is being appended so
10493 just warn when the length of the source string is not known. */
8d6c6ef5 10494 check_strncat_sizes (exp, objsize);
10495 return;
0a39fd54 10496 }
0a39fd54 10497
e6a18b5a 10498 /* The destination argument is the first one for all built-ins above. */
10499 tree dst = CALL_EXPR_ARG (exp, 0);
10500
10501 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
0a39fd54 10502}
10503
10504/* Emit warning if a buffer overflow is detected at compile time
10505 in __sprintf_chk/__vsprintf_chk calls. */
10506
10507static void
10508maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10509{
1e4adcfc 10510 tree size, len, fmt;
0a39fd54 10511 const char *fmt_str;
c2f47e15 10512 int nargs = call_expr_nargs (exp);
0a39fd54 10513
10514 /* Verify the required arguments in the original call. */
48e1416a 10515
c2f47e15 10516 if (nargs < 4)
0a39fd54 10517 return;
c2f47e15 10518 size = CALL_EXPR_ARG (exp, 2);
10519 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 10520
e913b5cd 10521 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 10522 return;
10523
10524 /* Check whether the format is a literal string constant. */
10525 fmt_str = c_getstr (fmt);
10526 if (fmt_str == NULL)
10527 return;
10528
d4473c84 10529 if (!init_target_chars ())
99eabcc1 10530 return;
10531
0a39fd54 10532 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 10533 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 10534 len = build_int_cstu (size_type_node, strlen (fmt_str));
10535 /* If the format is "%s" and first ... argument is a string literal,
10536 we know it too. */
c2f47e15 10537 else if (fcode == BUILT_IN_SPRINTF_CHK
10538 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 10539 {
10540 tree arg;
10541
c2f47e15 10542 if (nargs < 5)
0a39fd54 10543 return;
c2f47e15 10544 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 10545 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10546 return;
10547
10548 len = c_strlen (arg, 1);
e913b5cd 10549 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 10550 return;
10551 }
10552 else
10553 return;
10554
5aef8938 10555 /* Add one for the terminating nul. */
10556 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
e6a18b5a 10557
10558 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10559 /*maxread=*/NULL_TREE, len, size);
0a39fd54 10560}
10561
2c281b15 10562/* Emit warning if a free is called with address of a variable. */
10563
10564static void
10565maybe_emit_free_warning (tree exp)
10566{
10567 tree arg = CALL_EXPR_ARG (exp, 0);
10568
10569 STRIP_NOPS (arg);
10570 if (TREE_CODE (arg) != ADDR_EXPR)
10571 return;
10572
10573 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 10574 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 10575 return;
10576
10577 if (SSA_VAR_P (arg))
f74ea1c2 10578 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10579 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 10580 else
f74ea1c2 10581 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10582 "%Kattempt to free a non-heap object", exp);
2c281b15 10583}
10584
c2f47e15 10585/* Fold a call to __builtin_object_size with arguments PTR and OST,
10586 if possible. */
0a39fd54 10587
f7715905 10588static tree
c2f47e15 10589fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 10590{
a6caa15f 10591 unsigned HOST_WIDE_INT bytes;
0a39fd54 10592 int object_size_type;
10593
c2f47e15 10594 if (!validate_arg (ptr, POINTER_TYPE)
10595 || !validate_arg (ost, INTEGER_TYPE))
10596 return NULL_TREE;
0a39fd54 10597
0a39fd54 10598 STRIP_NOPS (ost);
10599
10600 if (TREE_CODE (ost) != INTEGER_CST
10601 || tree_int_cst_sgn (ost) < 0
10602 || compare_tree_int (ost, 3) > 0)
c2f47e15 10603 return NULL_TREE;
0a39fd54 10604
e913b5cd 10605 object_size_type = tree_to_shwi (ost);
0a39fd54 10606
10607 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10608 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10609 and (size_t) 0 for types 2 and 3. */
10610 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 10611 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 10612
10613 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 10614 {
4e91a07b 10615 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 10616 if (wi::fits_to_tree_p (bytes, size_type_node))
10617 return build_int_cstu (size_type_node, bytes);
a6caa15f 10618 }
0a39fd54 10619 else if (TREE_CODE (ptr) == SSA_NAME)
10620 {
0a39fd54 10621 /* If object size is not known yet, delay folding until
10622 later. Maybe subsequent passes will help determining
10623 it. */
4e91a07b 10624 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10625 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 10626 return build_int_cstu (size_type_node, bytes);
0a39fd54 10627 }
10628
a6caa15f 10629 return NULL_TREE;
0a39fd54 10630}
10631
12f08300 10632/* Builtins with folding operations that operate on "..." arguments
10633 need special handling; we need to store the arguments in a convenient
10634 data structure before attempting any folding. Fortunately there are
10635 only a few builtins that fall into this category. FNDECL is the
10636 function, EXP is the CALL_EXPR for the call. */
10637
10638static tree
10639fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10640{
10641 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10642 tree ret = NULL_TREE;
10643
10644 switch (fcode)
10645 {
10646 case BUILT_IN_FPCLASSIFY:
10647 ret = fold_builtin_fpclassify (loc, args, nargs);
10648 break;
10649
10650 default:
10651 break;
10652 }
10653 if (ret)
10654 {
10655 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10656 SET_EXPR_LOCATION (ret, loc);
10657 TREE_NO_WARNING (ret) = 1;
10658 return ret;
10659 }
10660 return NULL_TREE;
10661}
10662
99eabcc1 10663/* Initialize format string characters in the target charset. */
10664
b9ea678c 10665bool
99eabcc1 10666init_target_chars (void)
10667{
10668 static bool init;
10669 if (!init)
10670 {
10671 target_newline = lang_hooks.to_target_charset ('\n');
10672 target_percent = lang_hooks.to_target_charset ('%');
10673 target_c = lang_hooks.to_target_charset ('c');
10674 target_s = lang_hooks.to_target_charset ('s');
10675 if (target_newline == 0 || target_percent == 0 || target_c == 0
10676 || target_s == 0)
10677 return false;
10678
10679 target_percent_c[0] = target_percent;
10680 target_percent_c[1] = target_c;
10681 target_percent_c[2] = '\0';
10682
10683 target_percent_s[0] = target_percent;
10684 target_percent_s[1] = target_s;
10685 target_percent_s[2] = '\0';
10686
10687 target_percent_s_newline[0] = target_percent;
10688 target_percent_s_newline[1] = target_s;
10689 target_percent_s_newline[2] = target_newline;
10690 target_percent_s_newline[3] = '\0';
a0c938f0 10691
99eabcc1 10692 init = true;
10693 }
10694 return true;
10695}
bffb7645 10696
f0c477f2 10697/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10698 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 10699 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 10700 function assumes that you cleared the MPFR flags and then
10701 calculated M to see if anything subsequently set a flag prior to
10702 entering this function. Return NULL_TREE if any checks fail. */
10703
10704static tree
d4473c84 10705do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 10706{
10707 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10708 overflow/underflow occurred. If -frounding-math, proceed iff the
10709 result of calling FUNC was exact. */
d4473c84 10710 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 10711 && (!flag_rounding_math || !inexact))
10712 {
10713 REAL_VALUE_TYPE rr;
10714
66fa16e6 10715 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 10716 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10717 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10718 but the mpft_t is not, then we underflowed in the
10719 conversion. */
776a7bab 10720 if (real_isfinite (&rr)
f0c477f2 10721 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10722 {
10723 REAL_VALUE_TYPE rmode;
10724
10725 real_convert (&rmode, TYPE_MODE (type), &rr);
10726 /* Proceed iff the specified mode can hold the value. */
10727 if (real_identical (&rmode, &rr))
10728 return build_real (type, rmode);
10729 }
10730 }
10731 return NULL_TREE;
10732}
10733
239d491a 10734/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10735 number and no overflow/underflow occurred. INEXACT is true if M
10736 was not exactly calculated. TYPE is the tree type for the result.
10737 This function assumes that you cleared the MPFR flags and then
10738 calculated M to see if anything subsequently set a flag prior to
652d9409 10739 entering this function. Return NULL_TREE if any checks fail, if
10740 FORCE_CONVERT is true, then bypass the checks. */
239d491a 10741
10742static tree
652d9409 10743do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 10744{
10745 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10746 overflow/underflow occurred. If -frounding-math, proceed iff the
10747 result of calling FUNC was exact. */
652d9409 10748 if (force_convert
10749 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10750 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10751 && (!flag_rounding_math || !inexact)))
239d491a 10752 {
10753 REAL_VALUE_TYPE re, im;
10754
b0e7c4d4 10755 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10756 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 10757 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10758 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10759 but the mpft_t is not, then we underflowed in the
10760 conversion. */
652d9409 10761 if (force_convert
10762 || (real_isfinite (&re) && real_isfinite (&im)
10763 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10764 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 10765 {
10766 REAL_VALUE_TYPE re_mode, im_mode;
10767
10768 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10769 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10770 /* Proceed iff the specified mode can hold the value. */
652d9409 10771 if (force_convert
10772 || (real_identical (&re_mode, &re)
10773 && real_identical (&im_mode, &im)))
239d491a 10774 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10775 build_real (TREE_TYPE (type), im_mode));
10776 }
10777 }
10778 return NULL_TREE;
10779}
239d491a 10780
e5407ca6 10781/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10782 the pointer *(ARG_QUO) and return the result. The type is taken
10783 from the type of ARG0 and is used for setting the precision of the
10784 calculation and results. */
10785
10786static tree
10787do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10788{
10789 tree const type = TREE_TYPE (arg0);
10790 tree result = NULL_TREE;
48e1416a 10791
e5407ca6 10792 STRIP_NOPS (arg0);
10793 STRIP_NOPS (arg1);
48e1416a 10794
e5407ca6 10795 /* To proceed, MPFR must exactly represent the target floating point
10796 format, which only happens when the target base equals two. */
10797 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10798 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10799 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10800 {
10801 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10802 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10803
776a7bab 10804 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 10805 {
e2eb2b7f 10806 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10807 const int prec = fmt->p;
10808 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 10809 tree result_rem;
10810 long integer_quo;
10811 mpfr_t m0, m1;
10812
10813 mpfr_inits2 (prec, m0, m1, NULL);
10814 mpfr_from_real (m0, ra0, GMP_RNDN);
10815 mpfr_from_real (m1, ra1, GMP_RNDN);
10816 mpfr_clear_flags ();
e2eb2b7f 10817 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 10818 /* Remquo is independent of the rounding mode, so pass
10819 inexact=0 to do_mpfr_ckconv(). */
10820 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10821 mpfr_clears (m0, m1, NULL);
10822 if (result_rem)
10823 {
10824 /* MPFR calculates quo in the host's long so it may
10825 return more bits in quo than the target int can hold
10826 if sizeof(host long) > sizeof(target int). This can
10827 happen even for native compilers in LP64 mode. In
10828 these cases, modulo the quo value with the largest
10829 number that the target int can hold while leaving one
10830 bit for the sign. */
10831 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10832 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10833
10834 /* Dereference the quo pointer argument. */
10835 arg_quo = build_fold_indirect_ref (arg_quo);
10836 /* Proceed iff a valid pointer type was passed in. */
10837 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10838 {
10839 /* Set the value. */
7002a1c8 10840 tree result_quo
10841 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10842 build_int_cst (TREE_TYPE (arg_quo),
10843 integer_quo));
e5407ca6 10844 TREE_SIDE_EFFECTS (result_quo) = 1;
10845 /* Combine the quo assignment with the rem. */
10846 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10847 result_quo, result_rem));
10848 }
10849 }
10850 }
10851 }
10852 return result;
10853}
e84da7c1 10854
10855/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10856 resulting value as a tree with type TYPE. The mpfr precision is
10857 set to the precision of TYPE. We assume that this mpfr function
10858 returns zero if the result could be calculated exactly within the
10859 requested precision. In addition, the integer pointer represented
10860 by ARG_SG will be dereferenced and set to the appropriate signgam
10861 (-1,1) value. */
10862
10863static tree
10864do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10865{
10866 tree result = NULL_TREE;
10867
10868 STRIP_NOPS (arg);
48e1416a 10869
e84da7c1 10870 /* To proceed, MPFR must exactly represent the target floating point
10871 format, which only happens when the target base equals two. Also
10872 verify ARG is a constant and that ARG_SG is an int pointer. */
10873 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10874 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10875 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10876 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10877 {
10878 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10879
10880 /* In addition to NaN and Inf, the argument cannot be zero or a
10881 negative integer. */
776a7bab 10882 if (real_isfinite (ra)
e84da7c1 10883 && ra->cl != rvc_zero
9af5ce0c 10884 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 10885 {
e2eb2b7f 10886 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10887 const int prec = fmt->p;
10888 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 10889 int inexact, sg;
10890 mpfr_t m;
10891 tree result_lg;
10892
10893 mpfr_init2 (m, prec);
10894 mpfr_from_real (m, ra, GMP_RNDN);
10895 mpfr_clear_flags ();
e2eb2b7f 10896 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 10897 result_lg = do_mpfr_ckconv (m, type, inexact);
10898 mpfr_clear (m);
10899 if (result_lg)
10900 {
10901 tree result_sg;
10902
10903 /* Dereference the arg_sg pointer argument. */
10904 arg_sg = build_fold_indirect_ref (arg_sg);
10905 /* Assign the signgam value into *arg_sg. */
10906 result_sg = fold_build2 (MODIFY_EXPR,
10907 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 10908 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 10909 TREE_SIDE_EFFECTS (result_sg) = 1;
10910 /* Combine the signgam assignment with the lgamma result. */
10911 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10912 result_sg, result_lg));
10913 }
10914 }
10915 }
10916
10917 return result;
10918}
75a70cf9 10919
c699fab8 10920/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10921 mpc function FUNC on it and return the resulting value as a tree
10922 with type TYPE. The mpfr precision is set to the precision of
10923 TYPE. We assume that function FUNC returns zero if the result
652d9409 10924 could be calculated exactly within the requested precision. If
10925 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10926 in the arguments and/or results. */
c699fab8 10927
63e89698 10928tree
652d9409 10929do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 10930 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10931{
10932 tree result = NULL_TREE;
48e1416a 10933
c699fab8 10934 STRIP_NOPS (arg0);
10935 STRIP_NOPS (arg1);
10936
10937 /* To proceed, MPFR must exactly represent the target floating point
10938 format, which only happens when the target base equals two. */
10939 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10940 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10941 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10942 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10943 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10944 {
10945 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10946 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10947 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10948 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10949
652d9409 10950 if (do_nonfinite
10951 || (real_isfinite (re0) && real_isfinite (im0)
10952 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 10953 {
10954 const struct real_format *const fmt =
10955 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10956 const int prec = fmt->p;
10957 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10958 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10959 int inexact;
10960 mpc_t m0, m1;
48e1416a 10961
c699fab8 10962 mpc_init2 (m0, prec);
10963 mpc_init2 (m1, prec);
9af5ce0c 10964 mpfr_from_real (mpc_realref (m0), re0, rnd);
10965 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10966 mpfr_from_real (mpc_realref (m1), re1, rnd);
10967 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 10968 mpfr_clear_flags ();
10969 inexact = func (m0, m0, m1, crnd);
652d9409 10970 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 10971 mpc_clear (m0);
10972 mpc_clear (m1);
10973 }
10974 }
10975
10976 return result;
10977}
239d491a 10978
75a70cf9 10979/* A wrapper function for builtin folding that prevents warnings for
10980 "statement without effect" and the like, caused by removing the
10981 call node earlier than the warning is generated. */
10982
10983tree
1a91d914 10984fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 10985{
10986 tree ret = NULL_TREE;
10987 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 10988 location_t loc = gimple_location (stmt);
a0e9bfbb 10989 if (fndecl && fndecl_built_in_p (fndecl)
75a70cf9 10990 && !gimple_call_va_arg_pack_p (stmt))
10991 {
10992 int nargs = gimple_call_num_args (stmt);
9845fb99 10993 tree *args = (nargs > 0
10994 ? gimple_call_arg_ptr (stmt, 0)
10995 : &error_mark_node);
75a70cf9 10996
198622c0 10997 if (avoid_folding_inline_builtin (fndecl))
10998 return NULL_TREE;
75a70cf9 10999 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11000 {
9845fb99 11001 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 11002 }
11003 else
11004 {
9d884767 11005 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 11006 if (ret)
11007 {
11008 /* Propagate location information from original call to
11009 expansion of builtin. Otherwise things like
11010 maybe_emit_chk_warning, that operate on the expansion
11011 of a builtin, will use the wrong location information. */
11012 if (gimple_has_location (stmt))
11013 {
11014 tree realret = ret;
11015 if (TREE_CODE (ret) == NOP_EXPR)
11016 realret = TREE_OPERAND (ret, 0);
11017 if (CAN_HAVE_LOCATION_P (realret)
11018 && !EXPR_HAS_LOCATION (realret))
389dd41b 11019 SET_EXPR_LOCATION (realret, loc);
75a70cf9 11020 return realret;
11021 }
11022 return ret;
11023 }
11024 }
11025 }
11026 return NULL_TREE;
11027}
7bfefa9d 11028
b9a16870 11029/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 11030 and set ASMSPEC as its user assembler name. DECL must be a
11031 function decl that declares a builtin. */
11032
11033void
11034set_builtin_user_assembler_name (tree decl, const char *asmspec)
11035{
a0e9bfbb 11036 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
7bfefa9d 11037 && asmspec != 0);
11038
61ffc71a 11039 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 11040 set_user_assembler_name (builtin, asmspec);
61ffc71a 11041
11042 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11043 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 11044 {
44504d18 11045 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
61ffc71a 11046 set_user_assembler_libfunc ("ffs", asmspec);
44504d18 11047 set_optab_libfunc (ffs_optab, mode, "ffs");
7bfefa9d 11048 }
11049}
a6b74a67 11050
11051/* Return true if DECL is a builtin that expands to a constant or similarly
11052 simple code. */
11053bool
11054is_simple_builtin (tree decl)
11055{
a0e9bfbb 11056 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
a6b74a67 11057 switch (DECL_FUNCTION_CODE (decl))
11058 {
11059 /* Builtins that expand to constants. */
11060 case BUILT_IN_CONSTANT_P:
11061 case BUILT_IN_EXPECT:
11062 case BUILT_IN_OBJECT_SIZE:
11063 case BUILT_IN_UNREACHABLE:
11064 /* Simple register moves or loads from stack. */
fca0886c 11065 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 11066 case BUILT_IN_RETURN_ADDRESS:
11067 case BUILT_IN_EXTRACT_RETURN_ADDR:
11068 case BUILT_IN_FROB_RETURN_ADDR:
11069 case BUILT_IN_RETURN:
11070 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11071 case BUILT_IN_FRAME_ADDRESS:
11072 case BUILT_IN_VA_END:
11073 case BUILT_IN_STACK_SAVE:
11074 case BUILT_IN_STACK_RESTORE:
11075 /* Exception state returns or moves registers around. */
11076 case BUILT_IN_EH_FILTER:
11077 case BUILT_IN_EH_POINTER:
11078 case BUILT_IN_EH_COPY_VALUES:
11079 return true;
11080
11081 default:
11082 return false;
11083 }
11084
11085 return false;
11086}
11087
11088/* Return true if DECL is a builtin that is not expensive, i.e., they are
11089 most probably expanded inline into reasonably simple code. This is a
11090 superset of is_simple_builtin. */
11091bool
11092is_inexpensive_builtin (tree decl)
11093{
11094 if (!decl)
11095 return false;
11096 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11097 return true;
11098 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11099 switch (DECL_FUNCTION_CODE (decl))
11100 {
11101 case BUILT_IN_ABS:
2b34677f 11102 CASE_BUILT_IN_ALLOCA:
74bdbe96 11103 case BUILT_IN_BSWAP16:
a6b74a67 11104 case BUILT_IN_BSWAP32:
11105 case BUILT_IN_BSWAP64:
11106 case BUILT_IN_CLZ:
11107 case BUILT_IN_CLZIMAX:
11108 case BUILT_IN_CLZL:
11109 case BUILT_IN_CLZLL:
11110 case BUILT_IN_CTZ:
11111 case BUILT_IN_CTZIMAX:
11112 case BUILT_IN_CTZL:
11113 case BUILT_IN_CTZLL:
11114 case BUILT_IN_FFS:
11115 case BUILT_IN_FFSIMAX:
11116 case BUILT_IN_FFSL:
11117 case BUILT_IN_FFSLL:
11118 case BUILT_IN_IMAXABS:
11119 case BUILT_IN_FINITE:
11120 case BUILT_IN_FINITEF:
11121 case BUILT_IN_FINITEL:
11122 case BUILT_IN_FINITED32:
11123 case BUILT_IN_FINITED64:
11124 case BUILT_IN_FINITED128:
11125 case BUILT_IN_FPCLASSIFY:
11126 case BUILT_IN_ISFINITE:
11127 case BUILT_IN_ISINF_SIGN:
11128 case BUILT_IN_ISINF:
11129 case BUILT_IN_ISINFF:
11130 case BUILT_IN_ISINFL:
11131 case BUILT_IN_ISINFD32:
11132 case BUILT_IN_ISINFD64:
11133 case BUILT_IN_ISINFD128:
11134 case BUILT_IN_ISNAN:
11135 case BUILT_IN_ISNANF:
11136 case BUILT_IN_ISNANL:
11137 case BUILT_IN_ISNAND32:
11138 case BUILT_IN_ISNAND64:
11139 case BUILT_IN_ISNAND128:
11140 case BUILT_IN_ISNORMAL:
11141 case BUILT_IN_ISGREATER:
11142 case BUILT_IN_ISGREATEREQUAL:
11143 case BUILT_IN_ISLESS:
11144 case BUILT_IN_ISLESSEQUAL:
11145 case BUILT_IN_ISLESSGREATER:
11146 case BUILT_IN_ISUNORDERED:
11147 case BUILT_IN_VA_ARG_PACK:
11148 case BUILT_IN_VA_ARG_PACK_LEN:
11149 case BUILT_IN_VA_COPY:
11150 case BUILT_IN_TRAP:
11151 case BUILT_IN_SAVEREGS:
11152 case BUILT_IN_POPCOUNTL:
11153 case BUILT_IN_POPCOUNTLL:
11154 case BUILT_IN_POPCOUNTIMAX:
11155 case BUILT_IN_POPCOUNT:
11156 case BUILT_IN_PARITYL:
11157 case BUILT_IN_PARITYLL:
11158 case BUILT_IN_PARITYIMAX:
11159 case BUILT_IN_PARITY:
11160 case BUILT_IN_LABS:
11161 case BUILT_IN_LLABS:
11162 case BUILT_IN_PREFETCH:
ca4c3545 11163 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 11164 return true;
11165
11166 default:
11167 return is_simple_builtin (decl);
11168 }
11169
11170 return false;
11171}
507a998e 11172
11173/* Return true if T is a constant and the value cast to a target char
11174 can be represented by a host char.
11175 Store the casted char constant in *P if so. */
11176
11177bool
11178target_char_cst_p (tree t, char *p)
11179{
11180 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11181 return false;
11182
11183 *p = (char)tree_to_uhwi (t);
11184 return true;
11185}
e6a18b5a 11186
11187/* Return the maximum object size. */
11188
11189tree
11190max_object_size (void)
11191{
11192 /* To do: Make this a configurable parameter. */
11193 return TYPE_MAX_VALUE (ptrdiff_type_node);
11194}