/* Provenance: gcc/builtins.c from the GCC repository (blame view scraped
   from the thirdparty/gcc.git mirror at git.ipfire.org).  */
28f4ec01 1/* Expand builtin functions.
a5544970 2 Copyright (C) 1988-2019 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5
AM
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5
AM
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
957060b5 42#include "diagnostic-core.h"
40e23961 43#include "alias.h"
40e23961 44#include "fold-const.h"
5c1a2e63 45#include "fold-const-call.h"
cc8bea0a 46#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
ef29b12c 51#include "tree-ssa-strlen.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
44a845ca 70#include "gimple-fold.h"
ee92e7ba 71#include "intl.h"
7365279f 72#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
73#include "gomp-constants.h"
74#include "omp-general.h"
464969eb 75#include "tree-dfa.h"
81f5094d 76
fa19795e
RS
/* The default set of target-specific builtin data, and — when the target
   supports run-time switching (SWITCHABLE_TARGET) — a pointer to the set
   currently in effect.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringified names of all builtins, produced by expanding DEF_BUILTIN
   once for every entry in builtins.def.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
98
4682ae04 99static int target_char_cast (tree, char *);
435bb2a1 100static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
101static int apply_args_size (void);
102static int apply_result_size (void);
4682ae04 103static rtx result_vector (int, rtx);
4682ae04
AJ
104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6c7cf1f0 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 114static rtx expand_builtin_sincos (tree);
4359dc2a 115static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 118static rtx expand_builtin_next_arg (void);
4682ae04
AJ
119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
523a59ff 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
44e10129 123static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 126static rtx expand_builtin_memchr (tree, rtx);
44e10129 127static rtx expand_builtin_memcpy (tree, rtx);
671a00ee 128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
2ff5ffb6 129 rtx target, tree exp,
03a9b90a
AS
130 memop_ret retmode,
131 bool might_overlap);
e50d56a5 132static rtx expand_builtin_memmove (tree, rtx);
671a00ee 133static rtx expand_builtin_mempcpy (tree, rtx);
2ff5ffb6 134static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
b5338fb3 135static rtx expand_builtin_strcat (tree);
44e10129 136static rtx expand_builtin_strcpy (tree, rtx);
e08341bb 137static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
ef4bddc2 138static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 139static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 140static rtx expand_builtin_strncat (tree, rtx);
44e10129 141static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 142static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
143static rtx expand_builtin_memset (tree, rtx, machine_mode);
144static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 145static rtx expand_builtin_bzero (tree);
ef4bddc2 146static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 147static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 148static rtx expand_builtin_alloca (tree);
ef4bddc2 149static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 150static rtx expand_builtin_frame_address (tree, tree);
db3927fb 151static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 152static rtx expand_builtin_expect (tree, rtx);
1e9168b2 153static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
154static tree fold_builtin_constant_p (tree);
155static tree fold_builtin_classify_type (tree);
ab996409 156static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 157static tree fold_builtin_inf (location_t, tree, int);
db3927fb 158static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 159static bool validate_arg (const_tree, enum tree_code code);
4682ae04 160static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 161static rtx expand_builtin_signbit (tree, rtx);
db3927fb 162static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
163static tree fold_builtin_isascii (location_t, tree);
164static tree fold_builtin_toascii (location_t, tree);
165static tree fold_builtin_isdigit (location_t, tree);
166static tree fold_builtin_fabs (location_t, tree, tree);
167static tree fold_builtin_abs (location_t, tree, tree);
168static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 169 enum tree_code);
903c723b 170static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb 171
b5338fb3
MS
172static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173static tree fold_builtin_strspn (location_t, tree, tree, tree);
174static tree fold_builtin_strcspn (location_t, tree, tree, tree);
6de9cd9a 175
10a0d495 176static rtx expand_builtin_object_size (tree);
ef4bddc2 177static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
178 enum built_in_function);
179static void maybe_emit_chk_warning (tree, enum built_in_function);
180static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 181static void maybe_emit_free_warning (tree);
5039610b 182static tree fold_builtin_object_size (tree, tree);
000ba23d 183
ad03a744 184unsigned HOST_WIDE_INT target_newline;
fef5a0d9 185unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
186static unsigned HOST_WIDE_INT target_c;
187static unsigned HOST_WIDE_INT target_s;
edd7ae68 188char target_percent_c[3];
fef5a0d9 189char target_percent_s[3];
ad03a744 190char target_percent_s_newline[4];
ea91f957 191static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 192static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 193static void expand_builtin_sync_synchronize (void);
10a0d495 194
d7f09764
DN
/* Return true if NAME begins with one of the reserved builtin prefixes:
   "__builtin_", "__sync_" or "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;
  return false;
}
6de9cd9a 208
bbf7ce11
RAE
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".
   NODE is expected to be a FUNCTION_DECL with a DECL_NAME.  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
222
644ffefd
MJ
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels contribute no alignment knowledge beyond BITS_PER_UNIT.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      /* All-ones means "no explicit mask seen on the address".  */
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  /* An additional index makes the offset unpredictable.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
380
b0f4a35f
RG
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false: EXP is an actual access, not just an address
     computation, so the worker may rely on the access taking place.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
392
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  /* A nonzero misalignment shrinks the usable alignment down to the
     lowest set bit of the bit offset.  */
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
410
644ffefd
MJ
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &OBJ: derive the result from the object itself; ADDR_P is true
       since only the address, not an access, is being computed.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      /* Recurse on the base pointer, then fold in the added offset.  */
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* Variable offset: only its known trailing zero bits can
	     preserve alignment.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Use alignment recorded in the SSA name's pointer info, if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant "pointer" has an exactly known value, hence exactly
	 known low bits.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: fall back to the conservative answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
485
87c0fb4b
RG
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  /* A nonzero misalignment shrinks the usable alignment down to the
     lowest set bit of the bit offset.  */
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
509
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  const char *bytes = (const char *) ptr;
  unsigned count = 0;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char: scan byte by byte.  */
      while (count < maxelts && bytes[count] != 0)
	count++;
    }
  else
    {
      /* Wide elements: an element is zero iff all ELTSIZE of its
	 bytes are zero.  */
      while (count < maxelts
	     && memcmp (bytes + count * eltsize, "\0\0\0\0", eltsize) != 0)
	count++;
    }
  return count;
}
542
6ab24ea8
MS
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument that is
   a character array with no terminating NUL, declared at DECL.  Suppresses
   repeat warnings by setting TREE_NO_WARNING on ARG.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  /* Already diagnosed — don't warn twice about the same argument.  */
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}
563
b5338fb3
MS
/* For a call EXPR (which may be null) that expects a string argument
   and SRC as the argument, returns false if SRC is a character array
   with no terminating NUL.  When nonnull, BOUND is the number of
   characters in which to expect the terminating NUL.
   When EXPR is nonnull also issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
  tree size;
  bool exact;
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  if (bound)
    {
      wide_int min, max;
      if (TREE_CODE (bound) == INTEGER_CST)
	min = max = wi::to_wide (bound);
      else
	{
	  /* Without a usable range for BOUND, give up (assume OK).  */
	  value_range_kind rng = get_range_info (bound, &min, &max);
	  if (rng != VR_RANGE)
	    return true;
	}

      /* If the access is bounded to at most SIZE characters, the missing
	 NUL cannot be read — no diagnostic.  */
      if (wi::leu_p (min, wi::to_wide (size)))
	return true;
    }

  if (expr && !TREE_NO_WARNING (expr))
    {
      tree fndecl = get_callee_fndecl (expr);
      const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
    }

  return false;
}
608
e08341bb
MS
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		/* A variable offset makes the remaining size inexact.  */
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
655
1eb4547b
MS
656/* Compute the length of a null-terminated character string or wide
657 character string handling character sizes of 1, 2, and 4 bytes.
658 TREE_STRING_LENGTH is not the right way because it evaluates to
659 the size of the character array in bytes (as opposed to characters)
660 and because it can contain a zero byte in the middle.
28f4ec01 661
f1ba665b 662 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 663 into the instruction stream and zero if it is going to be expanded.
f1ba665b 664 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627 665 is returned, otherwise NULL, since
14b7950f 666 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
ae808627
JJ
667 evaluate the side-effects.
668
21e8fb22
RB
669 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
670 accesses. Note that this implies the result is not going to be emitted
671 into the instruction stream.
672
7d583f42 673 Additional information about the string accessed may be recorded
14b7950f 674 in DATA. For example, if ARG references an unterminated string,
7d583f42
JL
675 then the declaration will be stored in the DECL field. If the
676 length of the unterminated string can be determined, it'll be
677 stored in the LEN field. Note this length could well be different
678 than what a C strlen call would return.
6ab24ea8 679
4148b00d
BE
680 ELTSIZE is 1 for normal single byte character strings, and 2 or
681 4 for wide characer strings. ELTSIZE is by default 1.
fed3cef0 682
4148b00d 683 The value returned is of type `ssizetype'. */
28f4ec01 684
6de9cd9a 685tree
14b7950f 686c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
28f4ec01 687{
7d583f42
JL
688 /* If we were not passed a DATA pointer, then get one to a local
689 structure. That avoids having to check DATA for NULL before
690 each time we want to use it. */
3f46ef1f 691 c_strlen_data local_strlen_data = { };
7d583f42
JL
692 if (!data)
693 data = &local_strlen_data;
694
1ebf0641 695 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
14b7950f
MS
696
697 tree src = STRIP_NOPS (arg);
ae808627
JJ
698 if (TREE_CODE (src) == COND_EXPR
699 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
700 {
701 tree len1, len2;
702
7d583f42
JL
703 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
704 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
33521f7d 705 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
706 return len1;
707 }
708
709 if (TREE_CODE (src) == COMPOUND_EXPR
710 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
7d583f42 711 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
ae808627 712
1eb4547b 713 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 714
1eb4547b
MS
715 /* Offset from the beginning of the string in bytes. */
716 tree byteoff;
4148b00d 717 tree memsize;
6ab24ea8
MS
718 tree decl;
719 src = string_constant (src, &byteoff, &memsize, &decl);
28f4ec01 720 if (src == 0)
5039610b 721 return NULL_TREE;
fed3cef0 722
1eb4547b 723 /* Determine the size of the string element. */
4148b00d
BE
724 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
725 return NULL_TREE;
1eb4547b
MS
726
727 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
35b4d3a6 728 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
bfb9bd47
MS
729 in case the latter is less than the size of the array, such as when
730 SRC refers to a short string literal used to initialize a large array.
731 In that case, the elements of the array after the terminating NUL are
732 all NUL. */
733 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
d01b568a 734 strelts = strelts / eltsize;
bfb9bd47 735
4148b00d
BE
736 if (!tree_fits_uhwi_p (memsize))
737 return NULL_TREE;
738
d01b568a 739 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1eb4547b
MS
740
741 /* PTR can point to the byte representation of any string type, including
742 char* and wchar_t*. */
743 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 744
1eb4547b 745 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01 746 {
4148b00d
BE
747 /* The code below works only for single byte character types. */
748 if (eltsize != 1)
749 return NULL_TREE;
750
bfb9bd47
MS
751 /* If the string has an internal NUL character followed by any
752 non-NUL characters (e.g., "foo\0bar"), we can't compute
753 the offset to the following NUL if we don't know where to
28f4ec01 754 start searching for it. */
bfb9bd47 755 unsigned len = string_length (ptr, eltsize, strelts);
fed3cef0 756
7d583f42
JL
757 /* Return when an embedded null character is found or none at all.
758 In the latter case, set the DECL/LEN field in the DATA structure
759 so that callers may examine them. */
6ab24ea8 760 if (len + 1 < strelts)
4148b00d 761 return NULL_TREE;
6ab24ea8
MS
762 else if (len >= maxelts)
763 {
7d583f42 764 data->decl = decl;
6c4aa5f6 765 data->off = byteoff;
b71bbbe2 766 data->minlen = ssize_int (len);
6ab24ea8
MS
767 return NULL_TREE;
768 }
c42d0aa0 769
d01b568a
BE
770 /* For empty strings the result should be zero. */
771 if (len == 0)
772 return ssize_int (0);
773
28f4ec01 774 /* We don't know the starting offset, but we do know that the string
bfb9bd47
MS
775 has no internal zero bytes. If the offset falls within the bounds
776 of the string subtract the offset from the length of the string,
777 and return that. Otherwise the length is zero. Take care to
778 use SAVE_EXPR in case the OFFSET has side-effects. */
e8bf3d5e
BE
779 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
780 : byteoff;
781 offsave = fold_convert_loc (loc, sizetype, offsave);
bfb9bd47 782 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
e8bf3d5e
BE
783 size_int (len));
784 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
785 offsave);
786 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
bfb9bd47
MS
787 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
788 build_zero_cst (ssizetype));
28f4ec01
BS
789 }
790
1eb4547b
MS
791 /* Offset from the beginning of the string in elements. */
792 HOST_WIDE_INT eltoff;
793
28f4ec01 794 /* We have a known offset into the string. Start searching there for
5197bd50 795 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
796 if (byteoff == 0)
797 eltoff = 0;
1ebf0641 798 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1eb4547b 799 eltoff = -1;
28f4ec01 800 else
1ebf0641 801 eltoff = tree_to_uhwi (byteoff) / eltsize;
fed3cef0 802
b2ed71b6
BE
803 /* If the offset is known to be out of bounds, warn, and call strlen at
804 runtime. */
d01b568a 805 if (eltoff < 0 || eltoff >= maxelts)
28f4ec01 806 {
1db01ff9 807 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81 808 if (only_value != 2
14b7950f 809 && !TREE_NO_WARNING (arg)
1db01ff9
JJ
810 && warning_at (loc, OPT_Warray_bounds,
811 "offset %qwi outside bounds of constant string",
812 eltoff))
14b7950f
MS
813 {
814 if (decl)
815 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
816 TREE_NO_WARNING (arg) = 1;
817 }
5039610b 818 return NULL_TREE;
28f4ec01 819 }
fed3cef0 820
4148b00d
BE
821 /* If eltoff is larger than strelts but less than maxelts the
822 string length is zero, since the excess memory will be zero. */
823 if (eltoff > strelts)
824 return ssize_int (0);
825
28f4ec01
BS
826 /* Use strlen to search for the first zero byte. Since any strings
827 constructed with build_string will have nulls appended, we win even
828 if we get handed something like (char[4])"abcd".
829
1eb4547b 830 Since ELTOFF is our starting index into the string, no further
28f4ec01 831 calculation is needed. */
1eb4547b 832 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
4148b00d 833 strelts - eltoff);
1eb4547b 834
d01b568a 835 /* Don't know what to return if there was no zero termination.
7d583f42
JL
836 Ideally this would turn into a gcc_checking_assert over time.
837 Set DECL/LEN so callers can examine them. */
d01b568a 838 if (len >= maxelts - eltoff)
6ab24ea8 839 {
7d583f42 840 data->decl = decl;
6c4aa5f6 841 data->off = byteoff;
b71bbbe2 842 data->minlen = ssize_int (len);
6ab24ea8
MS
843 return NULL_TREE;
844 }
1ebf0641 845
1eb4547b 846 return ssize_int (len);
28f4ec01
BS
847}
848
807e902e 849/* Return a constant integer corresponding to target reading
3140b2ed
JJ
850 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
851 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
852 are assumed to be zero, otherwise it reads as many characters
853 as needed. */
854
855rtx
856c_readstr (const char *str, scalar_int_mode mode,
857 bool null_terminated_p/*=true*/)
57814e5e 858{
57814e5e
JJ
859 HOST_WIDE_INT ch;
860 unsigned int i, j;
807e902e 861 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
57814e5e 862
298e6adc 863 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
807e902e
KZ
864 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
865 / HOST_BITS_PER_WIDE_INT;
866
867 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
868 for (i = 0; i < len; i++)
869 tmp[i] = 0;
5906d013 870
57814e5e
JJ
871 ch = 1;
872 for (i = 0; i < GET_MODE_SIZE (mode); i++)
873 {
874 j = i;
875 if (WORDS_BIG_ENDIAN)
876 j = GET_MODE_SIZE (mode) - i - 1;
877 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
e046112d 878 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
57814e5e
JJ
879 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
880 j *= BITS_PER_UNIT;
5906d013 881
3140b2ed 882 if (ch || !null_terminated_p)
57814e5e 883 ch = (unsigned char) str[i];
807e902e 884 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
57814e5e 885 }
807e902e
KZ
886
887 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
888 return immed_wide_int_const (c, mode);
57814e5e
JJ
889}
890
ab937357 891/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 892 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
893 P. */
894
895static int
4682ae04 896target_char_cast (tree cst, char *p)
ab937357
JJ
897{
898 unsigned HOST_WIDE_INT val, hostval;
899
de77ab75 900 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
901 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
902 return 1;
903
807e902e 904 /* Do not care if it fits or not right here. */
de77ab75 905 val = TREE_INT_CST_LOW (cst);
807e902e 906
ab937357 907 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 908 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
909
910 hostval = val;
911 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 912 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
913
914 if (val != hostval)
915 return 1;
916
917 *p = hostval;
918 return 0;
919}
920
6de9cd9a
DN
921/* Similar to save_expr, but assumes that arbitrary code is not executed
922 in between the multiple evaluations. In particular, we assume that a
923 non-addressable local variable will not be modified. */
924
925static tree
926builtin_save_expr (tree exp)
927{
5cbf5c20
RG
928 if (TREE_CODE (exp) == SSA_NAME
929 || (TREE_ADDRESSABLE (exp) == 0
930 && (TREE_CODE (exp) == PARM_DECL
8813a647 931 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
932 return exp;
933
934 return save_expr (exp);
935}
936
28f4ec01
BS
937/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
938 times to get the address of either a higher stack frame, or a return
939 address located within it (depending on FNDECL_CODE). */
fed3cef0 940
54e62799 941static rtx
c6d01079 942expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
28f4ec01
BS
943{
944 int i;
c6d01079 945 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
579f2946 946 if (tem == NULL_RTX)
c8f27794 947 {
579f2946
TS
948 /* For a zero count with __builtin_return_address, we don't care what
949 frame address we return, because target-specific definitions will
950 override us. Therefore frame pointer elimination is OK, and using
951 the soft frame pointer is OK.
952
953 For a nonzero count, or a zero count with __builtin_frame_address,
954 we require a stable offset from the current frame pointer to the
955 previous one, so we must use the hard frame pointer, and
956 we must disable frame pointer elimination. */
957 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
958 tem = frame_pointer_rtx;
959 else
960 {
961 tem = hard_frame_pointer_rtx;
c8f27794 962
579f2946
TS
963 /* Tell reload not to eliminate the frame pointer. */
964 crtl->accesses_prior_frames = 1;
965 }
c8f27794 966 }
c6d01079 967
28f4ec01
BS
968 if (count > 0)
969 SETUP_FRAME_ADDRESSES ();
28f4ec01 970
224869d9 971 /* On the SPARC, the return address is not in the frame, it is in a
28f4ec01
BS
972 register. There is no way to access it off of the current frame
973 pointer, but it can be accessed off the previous frame pointer by
974 reading the value from the register window save area. */
2e612c47 975 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
28f4ec01 976 count--;
28f4ec01
BS
977
978 /* Scan back COUNT frames to the specified frame. */
979 for (i = 0; i < count; i++)
980 {
981 /* Assume the dynamic chain pointer is in the word that the
982 frame address points to, unless otherwise specified. */
28f4ec01 983 tem = DYNAMIC_CHAIN_ADDRESS (tem);
28f4ec01 984 tem = memory_address (Pmode, tem);
bf877a76 985 tem = gen_frame_mem (Pmode, tem);
432fd734 986 tem = copy_to_reg (tem);
28f4ec01
BS
987 }
988
224869d9
EB
989 /* For __builtin_frame_address, return what we've got. But, on
990 the SPARC for example, we may have to add a bias. */
28f4ec01 991 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
224869d9 992 return FRAME_ADDR_RTX (tem);
28f4ec01 993
224869d9 994 /* For __builtin_return_address, get the return address from that frame. */
28f4ec01
BS
995#ifdef RETURN_ADDR_RTX
996 tem = RETURN_ADDR_RTX (count, tem);
997#else
998 tem = memory_address (Pmode,
0a81f074 999 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
bf877a76 1000 tem = gen_frame_mem (Pmode, tem);
28f4ec01
BS
1001#endif
1002 return tem;
1003}
1004
3bdf5ad1 1005/* Alias set used for setjmp buffer. */
4862826d 1006static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 1007
250d07b6 1008/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
1009 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1010 exception handling code. */
28f4ec01 1011
250d07b6 1012void
4682ae04 1013expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 1014{
ef4bddc2 1015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1016 rtx stack_save;
3bdf5ad1 1017 rtx mem;
28f4ec01 1018
3bdf5ad1
RK
1019 if (setjmp_alias_set == -1)
1020 setjmp_alias_set = new_alias_set ();
1021
5ae6cd0d 1022 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 1023
7d505b82 1024 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 1025
250d07b6
RH
1026 /* We store the frame pointer and the address of receiver_label in
1027 the buffer and use the rest of it for the stack save area, which
1028 is machine-dependent. */
28f4ec01 1029
3bdf5ad1 1030 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 1031 set_mem_alias_set (mem, setjmp_alias_set);
25403c41 1032 emit_move_insn (mem, hard_frame_pointer_rtx);
3bdf5ad1 1033
0a81f074
RS
1034 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode))),
ba4828e0 1036 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
1037
1038 emit_move_insn (validize_mem (mem),
250d07b6 1039 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
1040
1041 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 1042 plus_constant (Pmode, buf_addr,
28f4ec01 1043 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 1044 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 1045 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
1046
1047 /* If there is further processing to do, do it. */
95a3fb9d
RS
1048 if (targetm.have_builtin_setjmp_setup ())
1049 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 1050
ecaebb9e 1051 /* We have a nonlocal label. */
e3b5732b 1052 cfun->has_nonlocal_label = 1;
250d07b6 1053}
28f4ec01 1054
4f6c2131 1055/* Construct the trailing part of a __builtin_setjmp call. This is
e90d1568
HPN
1056 also called directly by the SJLJ exception handling code.
1057 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
250d07b6
RH
1058
1059void
95a3fb9d 1060expand_builtin_setjmp_receiver (rtx receiver_label)
250d07b6 1061{
531ca746
RH
1062 rtx chain;
1063
e90d1568 1064 /* Mark the FP as used when we get here, so we have to make sure it's
28f4ec01 1065 marked as used by this function. */
c41c1387 1066 emit_use (hard_frame_pointer_rtx);
28f4ec01
BS
1067
1068 /* Mark the static chain as clobbered here so life information
1069 doesn't get messed up for it. */
4b522b8f 1070 chain = rtx_for_static_chain (current_function_decl, true);
531ca746
RH
1071 if (chain && REG_P (chain))
1072 emit_clobber (chain);
28f4ec01 1073
38b0b093 1074 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
28f4ec01 1075 {
e90d1568
HPN
1076 /* If the argument pointer can be eliminated in favor of the
1077 frame pointer, we don't need to restore it. We assume here
1078 that if such an elimination is present, it can always be used.
1079 This is the case on all known machines; if we don't make this
1080 assumption, we do unnecessary saving on many machines. */
28f4ec01 1081 size_t i;
8b60264b 1082 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
28f4ec01 1083
b6a1cbae 1084 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
28f4ec01
BS
1085 if (elim_regs[i].from == ARG_POINTER_REGNUM
1086 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1087 break;
1088
b6a1cbae 1089 if (i == ARRAY_SIZE (elim_regs))
28f4ec01
BS
1090 {
1091 /* Now restore our arg pointer from the address at which it
278ed218 1092 was saved in our stack frame. */
2e3f842f 1093 emit_move_insn (crtl->args.internal_arg_pointer,
bd60bab2 1094 copy_to_reg (get_arg_pointer_save_area ()));
28f4ec01
BS
1095 }
1096 }
28f4ec01 1097
95a3fb9d
RS
1098 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1099 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1100 else if (targetm.have_nonlocal_goto_receiver ())
1101 emit_insn (targetm.gen_nonlocal_goto_receiver ());
28f4ec01 1102 else
95a3fb9d 1103 { /* Nothing */ }
bcd7edfe 1104
6fb5fa3c
DB
1105 /* We must not allow the code we just generated to be reordered by
1106 scheduling. Specifically, the update of the frame pointer must
f1257268 1107 happen immediately, not later. */
6fb5fa3c 1108 emit_insn (gen_blockage ());
250d07b6 1109}
28f4ec01 1110
28f4ec01
BS
1111/* __builtin_longjmp is passed a pointer to an array of five words (not
1112 all will be used on all machines). It operates similarly to the C
1113 library function of the same name, but is more efficient. Much of
4f6c2131 1114 the code below is copied from the handling of non-local gotos. */
28f4ec01 1115
54e62799 1116static void
4682ae04 1117expand_builtin_longjmp (rtx buf_addr, rtx value)
28f4ec01 1118{
58f4cf2a
DM
1119 rtx fp, lab, stack;
1120 rtx_insn *insn, *last;
ef4bddc2 1121 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1122
b8698a0f 1123 /* DRAP is needed for stack realign if longjmp is expanded to current
2e3f842f
L
1124 function */
1125 if (SUPPORTS_STACK_ALIGNMENT)
1126 crtl->need_drap = true;
1127
3bdf5ad1
RK
1128 if (setjmp_alias_set == -1)
1129 setjmp_alias_set = new_alias_set ();
1130
5ae6cd0d 1131 buf_addr = convert_memory_address (Pmode, buf_addr);
4b6c1672 1132
28f4ec01
BS
1133 buf_addr = force_reg (Pmode, buf_addr);
1134
531ca746
RH
1135 /* We require that the user must pass a second argument of 1, because
1136 that is what builtin_setjmp will return. */
298e6adc 1137 gcc_assert (value == const1_rtx);
28f4ec01 1138
d337d653 1139 last = get_last_insn ();
95a3fb9d
RS
1140 if (targetm.have_builtin_longjmp ())
1141 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
28f4ec01 1142 else
28f4ec01
BS
1143 {
1144 fp = gen_rtx_MEM (Pmode, buf_addr);
0a81f074 1145 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
28f4ec01
BS
1146 GET_MODE_SIZE (Pmode)));
1147
0a81f074 1148 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
28f4ec01 1149 2 * GET_MODE_SIZE (Pmode)));
ba4828e0
RK
1150 set_mem_alias_set (fp, setjmp_alias_set);
1151 set_mem_alias_set (lab, setjmp_alias_set);
1152 set_mem_alias_set (stack, setjmp_alias_set);
28f4ec01
BS
1153
1154 /* Pick up FP, label, and SP from the block and jump. This code is
1155 from expand_goto in stmt.c; see there for detailed comments. */
95a3fb9d 1156 if (targetm.have_nonlocal_goto ())
28f4ec01
BS
1157 /* We have to pass a value to the nonlocal_goto pattern that will
1158 get copied into the static_chain pointer, but it does not matter
1159 what that value is, because builtin_setjmp does not use it. */
95a3fb9d 1160 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
28f4ec01 1161 else
28f4ec01 1162 {
c41c1387
RS
1163 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1164 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
41439bf6 1165
511ed59d
WD
1166 lab = copy_to_reg (lab);
1167
71b14428
WD
1168 /* Restore the frame pointer and stack pointer. We must use a
1169 temporary since the setjmp buffer may be a local. */
1170 fp = copy_to_reg (fp);
9eac0f2a 1171 emit_stack_restore (SAVE_NONLOCAL, stack);
511ed59d
WD
1172
1173 /* Ensure the frame pointer move is not optimized. */
1174 emit_insn (gen_blockage ());
1175 emit_clobber (hard_frame_pointer_rtx);
1176 emit_clobber (frame_pointer_rtx);
71b14428 1177 emit_move_insn (hard_frame_pointer_rtx, fp);
28f4ec01 1178
c41c1387
RS
1179 emit_use (hard_frame_pointer_rtx);
1180 emit_use (stack_pointer_rtx);
28f4ec01
BS
1181 emit_indirect_jump (lab);
1182 }
1183 }
4b01bd16
RH
1184
1185 /* Search backwards and mark the jump insn as a non-local goto.
1186 Note that this precludes the use of __builtin_longjmp to a
1187 __builtin_setjmp target in the same function. However, we've
1188 already cautioned the user that these functions are for
1189 internal exception handling use only. */
8206fc89
AM
1190 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1191 {
298e6adc 1192 gcc_assert (insn != last);
5906d013 1193
4b4bf941 1194 if (JUMP_P (insn))
8206fc89 1195 {
65c5f2a6 1196 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
8206fc89
AM
1197 break;
1198 }
4b4bf941 1199 else if (CALL_P (insn))
ca7fd9cd 1200 break;
8206fc89 1201 }
28f4ec01
BS
1202}
1203
862d0b35
DN
1204static inline bool
1205more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1206{
1207 return (iter->i < iter->n);
1208}
1209
1210/* This function validates the types of a function call argument list
1211 against a specified list of tree_codes. If the last specifier is a 0,
474da67e 1212 that represents an ellipsis, otherwise the last specifier must be a
862d0b35
DN
1213 VOID_TYPE. */
1214
1215static bool
1216validate_arglist (const_tree callexpr, ...)
1217{
1218 enum tree_code code;
1219 bool res = 0;
1220 va_list ap;
1221 const_call_expr_arg_iterator iter;
1222 const_tree arg;
1223
1224 va_start (ap, callexpr);
1225 init_const_call_expr_arg_iterator (callexpr, &iter);
1226
474da67e 1227 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
0dba7960
JJ
1228 tree fn = CALL_EXPR_FN (callexpr);
1229 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
474da67e
MS
1230
1231 for (unsigned argno = 1; ; ++argno)
862d0b35
DN
1232 {
1233 code = (enum tree_code) va_arg (ap, int);
474da67e 1234
862d0b35
DN
1235 switch (code)
1236 {
1237 case 0:
1238 /* This signifies an ellipses, any further arguments are all ok. */
1239 res = true;
1240 goto end;
1241 case VOID_TYPE:
1242 /* This signifies an endlink, if no arguments remain, return
1243 true, otherwise return false. */
1244 res = !more_const_call_expr_args_p (&iter);
1245 goto end;
474da67e
MS
1246 case POINTER_TYPE:
1247 /* The actual argument must be nonnull when either the whole
1248 called function has been declared nonnull, or when the formal
1249 argument corresponding to the actual argument has been. */
0dba7960
JJ
1250 if (argmap
1251 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1252 {
1253 arg = next_const_call_expr_arg (&iter);
1254 if (!validate_arg (arg, code) || integer_zerop (arg))
1255 goto end;
1256 break;
1257 }
474da67e 1258 /* FALLTHRU */
862d0b35
DN
1259 default:
1260 /* If no parameters remain or the parameter's code does not
1261 match the specified code, return false. Otherwise continue
1262 checking any remaining arguments. */
1263 arg = next_const_call_expr_arg (&iter);
0dba7960 1264 if (!validate_arg (arg, code))
862d0b35
DN
1265 goto end;
1266 break;
1267 }
1268 }
862d0b35
DN
1269
1270 /* We need gotos here since we can only have one VA_CLOSE in a
1271 function. */
1272 end: ;
1273 va_end (ap);
1274
474da67e
MS
1275 BITMAP_FREE (argmap);
1276
862d0b35
DN
1277 return res;
1278}
1279
6de9cd9a
DN
1280/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1281 and the address of the save area. */
1282
1283static rtx
5039610b 1284expand_builtin_nonlocal_goto (tree exp)
6de9cd9a
DN
1285{
1286 tree t_label, t_save_area;
58f4cf2a
DM
1287 rtx r_label, r_save_area, r_fp, r_sp;
1288 rtx_insn *insn;
6de9cd9a 1289
5039610b 1290 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
1291 return NULL_RTX;
1292
5039610b
SL
1293 t_label = CALL_EXPR_ARG (exp, 0);
1294 t_save_area = CALL_EXPR_ARG (exp, 1);
6de9cd9a 1295
84217346 1296 r_label = expand_normal (t_label);
5e89a381 1297 r_label = convert_memory_address (Pmode, r_label);
84217346 1298 r_save_area = expand_normal (t_save_area);
5e89a381 1299 r_save_area = convert_memory_address (Pmode, r_save_area);
bc6d3f91
EB
1300 /* Copy the address of the save location to a register just in case it was
1301 based on the frame pointer. */
cba2d79f 1302 r_save_area = copy_to_reg (r_save_area);
6de9cd9a
DN
1303 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1304 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
0a81f074
RS
1305 plus_constant (Pmode, r_save_area,
1306 GET_MODE_SIZE (Pmode)));
6de9cd9a 1307
e3b5732b 1308 crtl->has_nonlocal_goto = 1;
6de9cd9a 1309
6de9cd9a 1310 /* ??? We no longer need to pass the static chain value, afaik. */
95a3fb9d
RS
1311 if (targetm.have_nonlocal_goto ())
1312 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
6de9cd9a 1313 else
6de9cd9a 1314 {
c41c1387
RS
1315 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1316 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
caf93cb0 1317
511ed59d
WD
1318 r_label = copy_to_reg (r_label);
1319
71b14428
WD
1320 /* Restore the frame pointer and stack pointer. We must use a
1321 temporary since the setjmp buffer may be a local. */
1322 r_fp = copy_to_reg (r_fp);
9eac0f2a 1323 emit_stack_restore (SAVE_NONLOCAL, r_sp);
511ed59d
WD
1324
1325 /* Ensure the frame pointer move is not optimized. */
1326 emit_insn (gen_blockage ());
1327 emit_clobber (hard_frame_pointer_rtx);
1328 emit_clobber (frame_pointer_rtx);
71b14428 1329 emit_move_insn (hard_frame_pointer_rtx, r_fp);
caf93cb0 1330
6de9cd9a
DN
1331 /* USE of hard_frame_pointer_rtx added for consistency;
1332 not clear if really needed. */
c41c1387
RS
1333 emit_use (hard_frame_pointer_rtx);
1334 emit_use (stack_pointer_rtx);
eae645b6
RS
1335
1336 /* If the architecture is using a GP register, we must
1337 conservatively assume that the target function makes use of it.
1338 The prologue of functions with nonlocal gotos must therefore
1339 initialize the GP register to the appropriate value, and we
1340 must then make sure that this value is live at the point
1341 of the jump. (Note that this doesn't necessarily apply
1342 to targets with a nonlocal_goto pattern; they are free
1343 to implement it in their own way. Note also that this is
1344 a no-op if the GP register is a global invariant.) */
959c1e20
AH
1345 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1346 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
c41c1387 1347 emit_use (pic_offset_table_rtx);
eae645b6 1348
6de9cd9a
DN
1349 emit_indirect_jump (r_label);
1350 }
caf93cb0 1351
6de9cd9a
DN
1352 /* Search backwards to the jump insn and mark it as a
1353 non-local goto. */
1354 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1355 {
4b4bf941 1356 if (JUMP_P (insn))
6de9cd9a 1357 {
65c5f2a6 1358 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
6de9cd9a
DN
1359 break;
1360 }
4b4bf941 1361 else if (CALL_P (insn))
6de9cd9a
DN
1362 break;
1363 }
1364
1365 return const0_rtx;
1366}
1367
2b92e7f5
RK
1368/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1369 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
1370 It updates the stack pointer in that block to the current value. This is
1371 also called directly by the SJLJ exception handling code. */
2b92e7f5 1372
d33606c3 1373void
2b92e7f5
RK
1374expand_builtin_update_setjmp_buf (rtx buf_addr)
1375{
ef4bddc2 1376 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 1377 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 1378 rtx stack_save
2b92e7f5
RK
1379 = gen_rtx_MEM (sa_mode,
1380 memory_address
1381 (sa_mode,
0a81f074
RS
1382 plus_constant (Pmode, buf_addr,
1383 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 1384
9eac0f2a 1385 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
1386}
1387
a9ccbb60
JJ
1388/* Expand a call to __builtin_prefetch. For a target that does not support
1389 data prefetch, evaluate the memory address argument in case it has side
1390 effects. */
1391
1392static void
5039610b 1393expand_builtin_prefetch (tree exp)
a9ccbb60
JJ
1394{
1395 tree arg0, arg1, arg2;
5039610b 1396 int nargs;
a9ccbb60
JJ
1397 rtx op0, op1, op2;
1398
5039610b 1399 if (!validate_arglist (exp, POINTER_TYPE, 0))
e83d297b
JJ
1400 return;
1401
5039610b
SL
1402 arg0 = CALL_EXPR_ARG (exp, 0);
1403
e83d297b
JJ
1404 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1405 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1406 locality). */
5039610b
SL
1407 nargs = call_expr_nargs (exp);
1408 if (nargs > 1)
1409 arg1 = CALL_EXPR_ARG (exp, 1);
e83d297b 1410 else
5039610b
SL
1411 arg1 = integer_zero_node;
1412 if (nargs > 2)
1413 arg2 = CALL_EXPR_ARG (exp, 2);
1414 else
9a9d280e 1415 arg2 = integer_three_node;
a9ccbb60
JJ
1416
1417 /* Argument 0 is an address. */
1418 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1419
1420 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1421 if (TREE_CODE (arg1) != INTEGER_CST)
1422 {
40b97a2e 1423 error ("second argument to %<__builtin_prefetch%> must be a constant");
ca7fd9cd 1424 arg1 = integer_zero_node;
a9ccbb60 1425 }
84217346 1426 op1 = expand_normal (arg1);
a9ccbb60
JJ
1427 /* Argument 1 must be either zero or one. */
1428 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1429 {
d4ee4d25 1430 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
40b97a2e 1431 " using zero");
a9ccbb60
JJ
1432 op1 = const0_rtx;
1433 }
1434
1435 /* Argument 2 (locality) must be a compile-time constant int. */
1436 if (TREE_CODE (arg2) != INTEGER_CST)
1437 {
40b97a2e 1438 error ("third argument to %<__builtin_prefetch%> must be a constant");
a9ccbb60
JJ
1439 arg2 = integer_zero_node;
1440 }
84217346 1441 op2 = expand_normal (arg2);
a9ccbb60
JJ
1442 /* Argument 2 must be 0, 1, 2, or 3. */
1443 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1444 {
d4ee4d25 1445 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
a9ccbb60
JJ
1446 op2 = const0_rtx;
1447 }
1448
134b044d 1449 if (targetm.have_prefetch ())
a9ccbb60 1450 {
99b1c316 1451 class expand_operand ops[3];
a5c7d693
RS
1452
1453 create_address_operand (&ops[0], op0);
1454 create_integer_operand (&ops[1], INTVAL (op1));
1455 create_integer_operand (&ops[2], INTVAL (op2));
134b044d 1456 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
a5c7d693 1457 return;
a9ccbb60 1458 }
ad76cef8 1459
5ab2f7b7
KH
1460 /* Don't do anything with direct references to volatile memory, but
1461 generate code to handle other side effects. */
3c0cb5de 1462 if (!MEM_P (op0) && side_effects_p (op0))
5ab2f7b7 1463 emit_insn (op0);
a9ccbb60
JJ
1464}
1465
3bdf5ad1 1466/* Get a MEM rtx for expression EXP which is the address of an operand
76715c32 1467 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
435bb2a1
JJ
1468 the maximum length of the block of memory that might be accessed or
1469 NULL if unknown. */
3bdf5ad1 1470
28f4ec01 1471static rtx
435bb2a1 1472get_memory_rtx (tree exp, tree len)
28f4ec01 1473{
805903b5
JJ
1474 tree orig_exp = exp;
1475 rtx addr, mem;
805903b5
JJ
1476
1477 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1478 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1479 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1480 exp = TREE_OPERAND (exp, 0);
1481
1482 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1483 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
8ac61af7 1484
3bdf5ad1 1485 /* Get an expression we can use to find the attributes to assign to MEM.
625ed172 1486 First remove any nops. */
1043771b 1487 while (CONVERT_EXPR_P (exp)
3bdf5ad1
RK
1488 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1489 exp = TREE_OPERAND (exp, 0);
1490
625ed172
MM
1491 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1492 (as builtin stringops may alias with anything). */
1493 exp = fold_build2 (MEM_REF,
1494 build_array_type (char_type_node,
1495 build_range_type (sizetype,
1496 size_one_node, len)),
1497 exp, build_int_cst (ptr_type_node, 0));
1498
1499 /* If the MEM_REF has no acceptable address, try to get the base object
1500 from the original address we got, and build an all-aliasing
1501 unknown-sized access to that one. */
1502 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1503 set_mem_attributes (mem, exp, 0);
1504 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1505 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1506 0))))
343fb412 1507 {
625ed172
MM
1508 exp = build_fold_addr_expr (exp);
1509 exp = fold_build2 (MEM_REF,
1510 build_array_type (char_type_node,
1511 build_range_type (sizetype,
1512 size_zero_node,
1513 NULL)),
1514 exp, build_int_cst (ptr_type_node, 0));
931e6c29 1515 set_mem_attributes (mem, exp, 0);
343fb412 1516 }
625ed172 1517 set_mem_alias_set (mem, 0);
28f4ec01
BS
1518 return mem;
1519}
1520\f
1521/* Built-in functions to perform an untyped call and return. */
1522
fa19795e
RS
1523#define apply_args_mode \
1524 (this_target_builtins->x_apply_args_mode)
1525#define apply_result_mode \
1526 (this_target_builtins->x_apply_result_mode)
28f4ec01 1527
28f4ec01
BS
1528/* Return the size required for the block returned by __builtin_apply_args,
1529 and initialize apply_args_mode. */
1530
1531static int
4682ae04 1532apply_args_size (void)
28f4ec01
BS
1533{
1534 static int size = -1;
cbf5468f
AH
1535 int align;
1536 unsigned int regno;
28f4ec01
BS
1537
1538 /* The values computed by this function never change. */
1539 if (size < 0)
1540 {
1541 /* The first value is the incoming arg-pointer. */
1542 size = GET_MODE_SIZE (Pmode);
1543
1544 /* The second value is the structure value address unless this is
1545 passed as an "invisible" first argument. */
92f6864c 1546 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1547 size += GET_MODE_SIZE (Pmode);
1548
1549 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1550 if (FUNCTION_ARG_REGNO_P (regno))
1551 {
b660eccf 1552 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
33521f7d 1553
298e6adc 1554 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1555
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
28f4ec01
BS
1559 size += GET_MODE_SIZE (mode);
1560 apply_args_mode[regno] = mode;
1561 }
1562 else
1563 {
b660eccf 1564 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1565 }
1566 }
1567 return size;
1568}
1569
1570/* Return the size required for the block returned by __builtin_apply,
1571 and initialize apply_result_mode. */
1572
1573static int
4682ae04 1574apply_result_size (void)
28f4ec01
BS
1575{
1576 static int size = -1;
1577 int align, regno;
28f4ec01
BS
1578
1579 /* The values computed by this function never change. */
1580 if (size < 0)
1581 {
1582 size = 0;
1583
1584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
82f81f18 1585 if (targetm.calls.function_value_regno_p (regno))
28f4ec01 1586 {
b660eccf 1587 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
33521f7d 1588
298e6adc 1589 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1590
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 size += GET_MODE_SIZE (mode);
1595 apply_result_mode[regno] = mode;
1596 }
1597 else
b660eccf 1598 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1599
1600 /* Allow targets that use untyped_call and untyped_return to override
1601 the size so that machine-specific information can be stored here. */
1602#ifdef APPLY_RESULT_SIZE
1603 size = APPLY_RESULT_SIZE;
1604#endif
1605 }
1606 return size;
1607}
1608
28f4ec01
BS
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs, one per return register recorded in
   apply_result_mode, each paired with its slot in the RESULT memory
   block at the same offsets apply_result_size laid out.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  /* At most one SET per hard register can be generated.  */
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned to its mode, mirroring the layout
	   computed in apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the value lives in the callee's outgoing
	   register, so map through INCOMING_REGNO.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1637
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Emits RTL that stores, into a stack block: the incoming arg pointer
   (at offset 0), the structure-value address if the target passes one,
   and every register recorded in apply_args_mode.  Returns a pseudo
   holding the address of that block.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Slot offsets must match the layout chosen by apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1695
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Build the register-saving insns in a detached sequence so they
       can be emitted at the function entry rather than here.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1739
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is the address of the callee; ARGUMENTS is the address of a
   block previously built by __builtin_apply_args; ARGSIZE is the number
   of bytes of stack arguments to copy.  Returns (in ptr_mode) the
   address of a stack block holding the callee's raw return registers,
   laid out as described by apply_result_mode.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1900
/* Perform an untyped return.

   RESULT is the address of a block produced by __builtin_apply.  Reload
   every return register from that block, mark each as used so it is
   live at the return, and jump to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Make sure apply_result_mode is initialized before it is read.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target provides an untyped_return pattern, let it restore
     the registers itself.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Offsets must match the layout built by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate USE insns in a separate sequence so they can all
	   be emitted just before the return below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1949
ad82abb8 1950/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1951
ad82abb8 1952static enum type_class
4682ae04 1953type_to_class (tree type)
ad82abb8
ZW
1954{
1955 switch (TREE_CODE (type))
1956 {
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
ad82abb8
ZW
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1975 }
1976}
8d51ecf8 1977
5039610b 1978/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1979
28f4ec01 1980static rtx
5039610b 1981expand_builtin_classify_type (tree exp)
28f4ec01 1982{
5039610b
SL
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1985 return GEN_INT (no_type_class);
1986}
1987
ee5fd23a
MM
1988/* This helper macro, meant to be used in mathfn_built_in below, determines
1989 which among a set of builtin math functions is appropriate for a given type
1990 mode. The `F' (float) and `L' (long double) are automatically generated
1991 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1992 types, there are additional types that are considered with 'F32', 'F64',
1993 'F128', etc. suffixes. */
b03ff92e
RS
1994#define CASE_MATHFN(MATHFN) \
1995 CASE_CFN_##MATHFN: \
1996 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1997 fcodel = BUILT_IN_##MATHFN##L ; break;
ee5fd23a
MM
1998/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1999 types. */
2000#define CASE_MATHFN_FLOATN(MATHFN) \
2001 CASE_CFN_##MATHFN: \
2002 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2003 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2004 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2005 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2006 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2007 break;
bf460eec 2008/* Similar to above, but appends _R after any F/L suffix. */
b03ff92e
RS
2009#define CASE_MATHFN_REENT(MATHFN) \
2010 case CFN_BUILT_IN_##MATHFN##_R: \
2011 case CFN_BUILT_IN_##MATHFN##F_R: \
2012 case CFN_BUILT_IN_##MATHFN##L_R: \
2013 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2014 fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 2015
5c1a2e63
RS
2016/* Return a function equivalent to FN but operating on floating-point
2017 values of type TYPE, or END_BUILTINS if no such function exists.
b03ff92e
RS
2018 This is purely an operation on function codes; it does not guarantee
2019 that the target actually has an implementation of the function. */
05f41289 2020
5c1a2e63 2021static built_in_function
b03ff92e 2022mathfn_built_in_2 (tree type, combined_fn fn)
272f51a3 2023{
ee5fd23a 2024 tree mtype;
5c1a2e63 2025 built_in_function fcode, fcodef, fcodel;
ee5fd23a
MM
2026 built_in_function fcodef16 = END_BUILTINS;
2027 built_in_function fcodef32 = END_BUILTINS;
2028 built_in_function fcodef64 = END_BUILTINS;
2029 built_in_function fcodef128 = END_BUILTINS;
2030 built_in_function fcodef32x = END_BUILTINS;
2031 built_in_function fcodef64x = END_BUILTINS;
2032 built_in_function fcodef128x = END_BUILTINS;
daa027cc
KG
2033
2034 switch (fn)
2035 {
b03ff92e
RS
2036 CASE_MATHFN (ACOS)
2037 CASE_MATHFN (ACOSH)
2038 CASE_MATHFN (ASIN)
2039 CASE_MATHFN (ASINH)
2040 CASE_MATHFN (ATAN)
2041 CASE_MATHFN (ATAN2)
2042 CASE_MATHFN (ATANH)
2043 CASE_MATHFN (CBRT)
c6cfa2bf 2044 CASE_MATHFN_FLOATN (CEIL)
b03ff92e 2045 CASE_MATHFN (CEXPI)
ee5fd23a 2046 CASE_MATHFN_FLOATN (COPYSIGN)
b03ff92e
RS
2047 CASE_MATHFN (COS)
2048 CASE_MATHFN (COSH)
2049 CASE_MATHFN (DREM)
2050 CASE_MATHFN (ERF)
2051 CASE_MATHFN (ERFC)
2052 CASE_MATHFN (EXP)
2053 CASE_MATHFN (EXP10)
2054 CASE_MATHFN (EXP2)
2055 CASE_MATHFN (EXPM1)
2056 CASE_MATHFN (FABS)
2057 CASE_MATHFN (FDIM)
c6cfa2bf 2058 CASE_MATHFN_FLOATN (FLOOR)
ee5fd23a
MM
2059 CASE_MATHFN_FLOATN (FMA)
2060 CASE_MATHFN_FLOATN (FMAX)
2061 CASE_MATHFN_FLOATN (FMIN)
b03ff92e
RS
2062 CASE_MATHFN (FMOD)
2063 CASE_MATHFN (FREXP)
2064 CASE_MATHFN (GAMMA)
2065 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2066 CASE_MATHFN (HUGE_VAL)
2067 CASE_MATHFN (HYPOT)
2068 CASE_MATHFN (ILOGB)
2069 CASE_MATHFN (ICEIL)
2070 CASE_MATHFN (IFLOOR)
2071 CASE_MATHFN (INF)
2072 CASE_MATHFN (IRINT)
2073 CASE_MATHFN (IROUND)
2074 CASE_MATHFN (ISINF)
2075 CASE_MATHFN (J0)
2076 CASE_MATHFN (J1)
2077 CASE_MATHFN (JN)
2078 CASE_MATHFN (LCEIL)
2079 CASE_MATHFN (LDEXP)
2080 CASE_MATHFN (LFLOOR)
2081 CASE_MATHFN (LGAMMA)
2082 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2083 CASE_MATHFN (LLCEIL)
2084 CASE_MATHFN (LLFLOOR)
2085 CASE_MATHFN (LLRINT)
2086 CASE_MATHFN (LLROUND)
2087 CASE_MATHFN (LOG)
2088 CASE_MATHFN (LOG10)
2089 CASE_MATHFN (LOG1P)
2090 CASE_MATHFN (LOG2)
2091 CASE_MATHFN (LOGB)
2092 CASE_MATHFN (LRINT)
2093 CASE_MATHFN (LROUND)
2094 CASE_MATHFN (MODF)
2095 CASE_MATHFN (NAN)
2096 CASE_MATHFN (NANS)
c6cfa2bf 2097 CASE_MATHFN_FLOATN (NEARBYINT)
b03ff92e
RS
2098 CASE_MATHFN (NEXTAFTER)
2099 CASE_MATHFN (NEXTTOWARD)
2100 CASE_MATHFN (POW)
2101 CASE_MATHFN (POWI)
2102 CASE_MATHFN (POW10)
2103 CASE_MATHFN (REMAINDER)
2104 CASE_MATHFN (REMQUO)
c6cfa2bf
MM
2105 CASE_MATHFN_FLOATN (RINT)
2106 CASE_MATHFN_FLOATN (ROUND)
7d7b99f9 2107 CASE_MATHFN_FLOATN (ROUNDEVEN)
b03ff92e
RS
2108 CASE_MATHFN (SCALB)
2109 CASE_MATHFN (SCALBLN)
2110 CASE_MATHFN (SCALBN)
2111 CASE_MATHFN (SIGNBIT)
2112 CASE_MATHFN (SIGNIFICAND)
2113 CASE_MATHFN (SIN)
2114 CASE_MATHFN (SINCOS)
2115 CASE_MATHFN (SINH)
ee5fd23a 2116 CASE_MATHFN_FLOATN (SQRT)
b03ff92e
RS
2117 CASE_MATHFN (TAN)
2118 CASE_MATHFN (TANH)
2119 CASE_MATHFN (TGAMMA)
c6cfa2bf 2120 CASE_MATHFN_FLOATN (TRUNC)
b03ff92e
RS
2121 CASE_MATHFN (Y0)
2122 CASE_MATHFN (Y1)
2123 CASE_MATHFN (YN)
daa027cc 2124
b03ff92e
RS
2125 default:
2126 return END_BUILTINS;
2127 }
daa027cc 2128
ee5fd23a
MM
2129 mtype = TYPE_MAIN_VARIANT (type);
2130 if (mtype == double_type_node)
5c1a2e63 2131 return fcode;
ee5fd23a 2132 else if (mtype == float_type_node)
5c1a2e63 2133 return fcodef;
ee5fd23a 2134 else if (mtype == long_double_type_node)
5c1a2e63 2135 return fcodel;
ee5fd23a
MM
2136 else if (mtype == float16_type_node)
2137 return fcodef16;
2138 else if (mtype == float32_type_node)
2139 return fcodef32;
2140 else if (mtype == float64_type_node)
2141 return fcodef64;
2142 else if (mtype == float128_type_node)
2143 return fcodef128;
2144 else if (mtype == float32x_type_node)
2145 return fcodef32x;
2146 else if (mtype == float64x_type_node)
2147 return fcodef64x;
2148 else if (mtype == float128x_type_node)
2149 return fcodef128x;
daa027cc 2150 else
5c1a2e63
RS
2151 return END_BUILTINS;
2152}
2153
2154/* Return mathematic function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2158
2159static tree
b03ff92e 2160mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2161{
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
5039610b 2164 return NULL_TREE;
e79983f4
MM
2165
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2168
2169 return builtin_decl_explicit (fcode2);
272f51a3
JH
2170}
2171
b03ff92e 2172/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2173
2174tree
b03ff92e 2175mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2176{
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2178}
2179
b03ff92e
RS
2180/* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit array. */
2182
2183tree
2184mathfn_built_in (tree type, enum built_in_function fn)
2185{
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2187}
2188
686ee971
RS
2189/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internals.def, not whether
2192 it is actually available on the target. */
2193
2194internal_fn
2195associated_internal_fn (tree fndecl)
2196{
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2199 switch (DECL_FUNCTION_CODE (fndecl))
2200 {
2201#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2202 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
ee5fd23a
MM
2203#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2204 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2205 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
4959a752
RS
2206#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
686ee971
RS
2208#include "internal-fn.def"
2209
2210 CASE_FLT_FN (BUILT_IN_POW10):
2211 return IFN_EXP10;
2212
2213 CASE_FLT_FN (BUILT_IN_DREM):
2214 return IFN_REMAINDER;
2215
2216 CASE_FLT_FN (BUILT_IN_SCALBN):
2217 CASE_FLT_FN (BUILT_IN_SCALBLN):
2218 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2219 return IFN_LDEXP;
2220 return IFN_LAST;
2221
2222 default:
2223 return IFN_LAST;
2224 }
2225}
2226
2227/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2234
2235internal_fn
2236replacement_internal_fn (gcall *call)
2237{
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2239 {
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2242 {
2243 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2246 return ifn;
2247 }
2248 }
2249 return IFN_LAST;
2250}
2251
1b1562a5
MM
2252/* Expand a call to the builtin trinary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands. */
2258
2259static rtx
2260expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2261{
2262 optab builtin_optab;
58f4cf2a
DM
2263 rtx op0, op1, op2, result;
2264 rtx_insn *insns;
1b1562a5
MM
2265 tree fndecl = get_callee_fndecl (exp);
2266 tree arg0, arg1, arg2;
ef4bddc2 2267 machine_mode mode;
1b1562a5
MM
2268
2269 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2270 return NULL_RTX;
2271
2272 arg0 = CALL_EXPR_ARG (exp, 0);
2273 arg1 = CALL_EXPR_ARG (exp, 1);
2274 arg2 = CALL_EXPR_ARG (exp, 2);
2275
2276 switch (DECL_FUNCTION_CODE (fndecl))
2277 {
2278 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 2279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
2280 builtin_optab = fma_optab; break;
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 /* Make a suitable register to place result in. */
2286 mode = TYPE_MODE (TREE_TYPE (exp));
2287
2288 /* Before working hard, check whether the instruction is available. */
2289 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2290 return NULL_RTX;
2291
04b80dbb 2292 result = gen_reg_rtx (mode);
1b1562a5
MM
2293
2294 /* Always stabilize the argument list. */
2295 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2296 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2297 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2298
2299 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2300 op1 = expand_normal (arg1);
2301 op2 = expand_normal (arg2);
2302
2303 start_sequence ();
2304
04b80dbb
RS
2305 /* Compute into RESULT.
2306 Set RESULT to wherever the result comes back. */
2307 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2308 result, 0);
1b1562a5
MM
2309
2310 /* If we were unable to expand via the builtin, stop the sequence
2311 (without outputting the insns) and call to the library function
2312 with the stabilized argument list. */
04b80dbb 2313 if (result == 0)
1b1562a5
MM
2314 {
2315 end_sequence ();
2316 return expand_call (exp, target, target == const0_rtx);
2317 }
2318
2319 /* Output the entire sequence. */
2320 insns = get_insns ();
2321 end_sequence ();
2322 emit_insn (insns);
2323
04b80dbb 2324 return result;
1b1562a5
MM
2325}
2326
6c7cf1f0 2327/* Expand a call to the builtin sin and cos math functions.
5039610b 2328 Return NULL_RTX if a normal call should be emitted rather than expanding the
6c7cf1f0
UB
2329 function in-line. EXP is the expression that is a call to the builtin
2330 function; if convenient, the result should be placed in TARGET.
2331 SUBTARGET may be used as the target for computing one of EXP's
2332 operands. */
2333
2334static rtx
2335expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2336{
2337 optab builtin_optab;
58f4cf2a
DM
2338 rtx op0;
2339 rtx_insn *insns;
6c7cf1f0 2340 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 2341 machine_mode mode;
5799f732 2342 tree arg;
6c7cf1f0 2343
5039610b
SL
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
6c7cf1f0 2346
5039610b 2347 arg = CALL_EXPR_ARG (exp, 0);
6c7cf1f0
UB
2348
2349 switch (DECL_FUNCTION_CODE (fndecl))
2350 {
ea6a6627
VR
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2353 builtin_optab = sincos_optab; break;
2354 default:
298e6adc 2355 gcc_unreachable ();
6c7cf1f0
UB
2356 }
2357
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2360
6c7cf1f0 2361 /* Check if sincos insn is available, otherwise fallback
9cf737f8 2362 to sin or cos insn. */
947131ba 2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6c7cf1f0
UB
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
ea6a6627 2366 CASE_FLT_FN (BUILT_IN_SIN):
6c7cf1f0 2367 builtin_optab = sin_optab; break;
ea6a6627 2368 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2369 builtin_optab = cos_optab; break;
2370 default:
298e6adc 2371 gcc_unreachable ();
6c7cf1f0 2372 }
6c7cf1f0
UB
2373
2374 /* Before working hard, check whether the instruction is available. */
947131ba 2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6c7cf1f0 2376 {
04b80dbb 2377 rtx result = gen_reg_rtx (mode);
6c7cf1f0
UB
2378
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side-effects more the once. */
5799f732 2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6c7cf1f0 2383
49452c07 2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6c7cf1f0 2385
6c7cf1f0
UB
2386 start_sequence ();
2387
04b80dbb
RS
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
6c7cf1f0
UB
2390 if (builtin_optab == sincos_optab)
2391 {
04b80dbb 2392 int ok;
5906d013 2393
6c7cf1f0
UB
2394 switch (DECL_FUNCTION_CODE (fndecl))
2395 {
ea6a6627 2396 CASE_FLT_FN (BUILT_IN_SIN):
04b80dbb 2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6c7cf1f0 2398 break;
ea6a6627 2399 CASE_FLT_FN (BUILT_IN_COS):
04b80dbb 2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6c7cf1f0
UB
2401 break;
2402 default:
298e6adc 2403 gcc_unreachable ();
6c7cf1f0 2404 }
04b80dbb 2405 gcc_assert (ok);
6c7cf1f0
UB
2406 }
2407 else
04b80dbb 2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
6c7cf1f0 2409
04b80dbb 2410 if (result != 0)
6c7cf1f0 2411 {
6c7cf1f0
UB
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
04b80dbb 2416 return result;
6c7cf1f0
UB
2417 }
2418
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call to the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2423 }
2424
04b80dbb 2425 return expand_call (exp, target, target == const0_rtx);
6c7cf1f0
UB
2426}
2427
44e10129
MM
2428/* Given an interclass math builtin decl FNDECL and it's argument ARG
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available return CODE_FOR_nothing. */
eaee4464 2431
44e10129
MM
2432static enum insn_code
2433interclass_mathfn_icode (tree arg, tree fndecl)
eaee4464 2434{
44e10129 2435 bool errno_set = false;
2225b9f2 2436 optab builtin_optab = unknown_optab;
ef4bddc2 2437 machine_mode mode;
eaee4464
UB
2438
2439 switch (DECL_FUNCTION_CODE (fndecl))
2440 {
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
903c723b
TC
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
0c8d3c2b 2455 break;
eaee4464
UB
2456 default:
2457 gcc_unreachable ();
2458 }
2459
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
44e10129 2462 return CODE_FOR_nothing;
eaee4464
UB
2463
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2466
0c8d3c2b 2467 if (builtin_optab)
947131ba 2468 return optab_handler (builtin_optab, mode);
44e10129
MM
2469 return CODE_FOR_nothing;
2470}
2471
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      /* Remember the insn stream position so the partially emitted
	 expansion can be rolled back if it fails below.  */
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard any insns emitted above and restore
	 the original (non-SAVE_EXPR) argument for the library call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  /* NULL_RTX tells the caller to emit a normal library call.  */
  return NULL_RTX;
}
2522
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos (x, double *sinp, double *cosp): one real argument followed
     by two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the user's pointers so the stores below
     get correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  /* The optab handler was verified above, so expansion must succeed.  */
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void; const0_rtx signals successful expansion.  */
  return const0_rtx;
}
2576
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      /* Emit sincos (arg, &tmp1, &tmp2) into stack temporaries and read
	 the results back below.  */
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      /* Last resort: cexpi(x) == cexp (0 + xi), returning the complex
	 result directly.  */
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex value with the cosine
     (op2) as real part and the sine (op1) as imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2685
44e10129
MM
2686/* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_exr
2689 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2690
2691static tree
2692build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2693{
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2697
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
2703}
44e10129 2704
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Choose the direct conversion optab and, should it be unavailable,
     the plain floating-point rounding builtin to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2842
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  /* BUILT_IN_NONE means there is no long-int fallback for this builtin.  */
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long int; narrow/widen to the mode the
	 original builtin returns.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2944
5039610b 2945/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2949
2950static rtx
4359dc2a 2951expand_builtin_powi (tree exp, rtx target)
17684d46 2952{
17684d46
RG
2953 tree arg0, arg1;
2954 rtx op0, op1;
ef4bddc2
RS
2955 machine_mode mode;
2956 machine_mode mode2;
17684d46 2957
5039610b
SL
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
17684d46 2960
5039610b
SL
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2964
17684d46
RG
2965 /* Emit a libcall to libgcc. */
2966
5039610b 2967 /* Mode of the 2nd argument must match that of an int. */
f4b31647 2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 2969
17684d46
RG
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2972
4359dc2a 2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
49452c07 2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
17684d46 2979
8a33f100 2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 2981 target, LCT_CONST, mode,
0b8495ae 2982 op0, mode, op1, mode2);
17684d46
RG
2983
2984 return target;
2985}
2986
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  class expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-address computation in front of the strlen insn
     emitted above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3092
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* Case 1: the bound is a compile-time constant.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      /* Diagnose an impossibly large bound; TREE_NO_WARNING suppresses
	 duplicate diagnostics on the same call.  */
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater. */
	  lendata.decl = unterminated_array (src, &len, &exact);
	  if (!lendata.decl)
	    return NULL_RTX;
	}

      if (lendata.decl
	  && !TREE_NO_WARNING (exp)
	  && ((tree_int_cst_lt (len, bound))
	      || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (warning_at (warnloc, OPT_Wstringop_overflow_,
			  exact
			  ? G_("%K%qD specified bound %E exceeds the size %E "
			       "of unterminated array")
			  : G_("%K%qD specified bound %E may exceed the size "
			       "of at most %E of unterminated array"),
			  exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (lendata.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	      return NULL_RTX;
	    }
	}

      if (!len)
	return NULL_RTX;

      /* strnlen (s, n) == min (strlen (s), n).  */
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  /* Case 2: the bound is an SSA name with (possibly) known range.  */
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (lendata.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (lendata.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	}
    }

  /* Punt on unterminated arrays rather than folding to a value the
     library call might not produce.  */
  if (lendata.decl)
    return NULL_RTX;

  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3230
57814e5e
JJ
3231/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3232 bytes from constant string DATA + OFFSET and return it as target
3233 constant. */
3234
3235static rtx
4682ae04 3236builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3237 scalar_int_mode mode)
57814e5e
JJ
3238{
3239 const char *str = (const char *) data;
3240
298e6adc
NS
3241 gcc_assert (offset >= 0
3242 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3243 <= strlen (str) + 1));
57814e5e
JJ
3244
3245 return c_readstr (str + offset, mode);
3246}
3247
/* LEN specify length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A constant length pins all three outputs to the same value.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA range information,
	 when available.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us to determine minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  /* The computed maximum must fit in the mode of the length operand.  */
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3314
ee92e7ba
MS
3315/* Try to verify that the sizes and lengths of the arguments to a string
3316 manipulation function given by EXP are within valid bounds and that
cc8bea0a
MS
3317 the operation does not lead to buffer overflow or read past the end.
3318 Arguments other than EXP may be null. When non-null, the arguments
3319 have the following meaning:
3320 DST is the destination of a copy call or NULL otherwise.
3321 SRC is the source of a copy call or NULL otherwise.
3322 DSTWRITE is the number of bytes written into the destination obtained
3323 from the user-supplied size argument to the function (such as in
3324 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3325 MAXREAD is the user-supplied bound on the length of the source sequence
ee92e7ba 3326 (such as in strncat(d, s, N). It specifies the upper limit on the number
cc8bea0a
MS
3327 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3328 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3329 expression EXP is a string function call (as opposed to a memory call
3330 like memcpy). As an exception, SRCSTR can also be an integer denoting
3331 the precomputed size of the source string or object (for functions like
3332 memcpy).
3333 DSTSIZE is the size of the destination object specified by the last
ee92e7ba 3334 argument to the _chk builtins, typically resulting from the expansion
cc8bea0a
MS
3335 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3336 DSTSIZE).
ee92e7ba 3337
cc8bea0a 3338 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
ee92e7ba
MS
3339 SIZE_MAX.
3340
cc8bea0a
MS
3341 If the call is successfully verified as safe return true, otherwise
3342 return false. */
ee92e7ba 3343
54aa6b58 3344bool
cc8bea0a
MS
3345check_access (tree exp, tree, tree, tree dstwrite,
3346 tree maxread, tree srcstr, tree dstsize)
ee92e7ba 3347{
cc8bea0a
MS
3348 int opt = OPT_Wstringop_overflow_;
3349
ee92e7ba 3350 /* The size of the largest object is half the address space, or
cc8bea0a
MS
3351 PTRDIFF_MAX. (This is way too permissive.) */
3352 tree maxobjsize = max_object_size ();
ee92e7ba 3353
cc8bea0a
MS
3354 /* Either the length of the source string for string functions or
3355 the size of the source object for raw memory functions. */
ee92e7ba
MS
3356 tree slen = NULL_TREE;
3357
d9c5a8b9
MS
3358 tree range[2] = { NULL_TREE, NULL_TREE };
3359
ee92e7ba
MS
3360 /* Set to true when the exact number of bytes written by a string
3361 function like strcpy is not known and the only thing that is
3362 known is that it must be at least one (for the terminating nul). */
3363 bool at_least_one = false;
cc8bea0a 3364 if (srcstr)
ee92e7ba 3365 {
cc8bea0a 3366 /* SRCSTR is normally a pointer to string but as a special case
ee92e7ba 3367 it can be an integer denoting the length of a string. */
cc8bea0a 3368 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
ee92e7ba
MS
3369 {
3370 /* Try to determine the range of lengths the source string
d9c5a8b9 3371 refers to. If it can be determined and is less than
cc8bea0a 3372 the upper bound given by MAXREAD add one to it for
ee92e7ba 3373 the terminating nul. Otherwise, set it to one for
cc8bea0a 3374 the same reason, or to MAXREAD as appropriate. */
5d6655eb
MS
3375 c_strlen_data lendata = { };
3376 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3377 range[0] = lendata.minlen;
a7160771 3378 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
cc8bea0a 3379 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
d9c5a8b9 3380 {
cc8bea0a
MS
3381 if (maxread && tree_int_cst_le (maxread, range[0]))
3382 range[0] = range[1] = maxread;
d9c5a8b9
MS
3383 else
3384 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3385 range[0], size_one_node);
3386
cc8bea0a
MS
3387 if (maxread && tree_int_cst_le (maxread, range[1]))
3388 range[1] = maxread;
d9c5a8b9
MS
3389 else if (!integer_all_onesp (range[1]))
3390 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3391 range[1], size_one_node);
3392
3393 slen = range[0];
3394 }
ee92e7ba
MS
3395 else
3396 {
3397 at_least_one = true;
3398 slen = size_one_node;
3399 }
3400 }
3401 else
cc8bea0a 3402 slen = srcstr;
ee92e7ba
MS
3403 }
3404
cc8bea0a 3405 if (!dstwrite && !maxread)
ee92e7ba
MS
3406 {
3407 /* When the only available piece of data is the object size
3408 there is nothing to do. */
3409 if (!slen)
3410 return true;
3411
3412 /* Otherwise, when the length of the source sequence is known
cc8bea0a 3413 (as with strlen), set DSTWRITE to it. */
d9c5a8b9 3414 if (!range[0])
cc8bea0a 3415 dstwrite = slen;
ee92e7ba
MS
3416 }
3417
cc8bea0a
MS
3418 if (!dstsize)
3419 dstsize = maxobjsize;
ee92e7ba 3420
cc8bea0a
MS
3421 if (dstwrite)
3422 get_size_range (dstwrite, range);
ee92e7ba 3423
cc8bea0a 3424 tree func = get_callee_fndecl (exp);
ee92e7ba
MS
3425
3426 /* First check the number of bytes to be written against the maximum
3427 object size. */
bfb9bd47
MS
3428 if (range[0]
3429 && TREE_CODE (range[0]) == INTEGER_CST
3430 && tree_int_cst_lt (maxobjsize, range[0]))
ee92e7ba 3431 {
781ff3d8
MS
3432 if (TREE_NO_WARNING (exp))
3433 return false;
3434
ee92e7ba 3435 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3436 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3437
781ff3d8 3438 bool warned;
ee92e7ba 3439 if (range[0] == range[1])
54aa6b58
MS
3440 warned = (func
3441 ? warning_at (loc, opt,
3442 "%K%qD specified size %E "
3443 "exceeds maximum object size %E",
3444 exp, func, range[0], maxobjsize)
3445 : warning_at (loc, opt,
3446 "%Kspecified size %E "
3447 "exceeds maximum object size %E",
3448 exp, range[0], maxobjsize));
781ff3d8 3449 else
54aa6b58
MS
3450 warned = (func
3451 ? warning_at (loc, opt,
3452 "%K%qD specified size between %E and %E "
3453 "exceeds maximum object size %E",
3454 exp, func,
3455 range[0], range[1], maxobjsize)
3456 : warning_at (loc, opt,
3457 "%Kspecified size between %E and %E "
3458 "exceeds maximum object size %E",
3459 exp, range[0], range[1], maxobjsize));
781ff3d8
MS
3460 if (warned)
3461 TREE_NO_WARNING (exp) = true;
3462
ee92e7ba
MS
3463 return false;
3464 }
3465
cc8bea0a
MS
3466 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3467 constant, and in range of unsigned HOST_WIDE_INT. */
3468 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3469
ee92e7ba
MS
3470 /* Next check the number of bytes to be written against the destination
3471 object size. */
cc8bea0a 3472 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
ee92e7ba
MS
3473 {
3474 if (range[0]
bfb9bd47 3475 && TREE_CODE (range[0]) == INTEGER_CST
cc8bea0a
MS
3476 && ((tree_fits_uhwi_p (dstsize)
3477 && tree_int_cst_lt (dstsize, range[0]))
bfb9bd47
MS
3478 || (dstwrite
3479 && tree_fits_uhwi_p (dstwrite)
cc8bea0a 3480 && tree_int_cst_lt (dstwrite, range[0]))))
ee92e7ba 3481 {
e0676e2e
MS
3482 if (TREE_NO_WARNING (exp))
3483 return false;
3484
ee92e7ba 3485 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3486 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3487
54aa6b58 3488 bool warned = false;
cc8bea0a 3489 if (dstwrite == slen && at_least_one)
d9c5a8b9
MS
3490 {
3491 /* This is a call to strcpy with a destination of 0 size
3492 and a source of unknown length. The call will write
3493 at least one byte past the end of the destination. */
54aa6b58
MS
3494 warned = (func
3495 ? warning_at (loc, opt,
3496 "%K%qD writing %E or more bytes into "
3497 "a region of size %E overflows "
3498 "the destination",
3499 exp, func, range[0], dstsize)
3500 : warning_at (loc, opt,
3501 "%Kwriting %E or more bytes into "
3502 "a region of size %E overflows "
3503 "the destination",
3504 exp, range[0], dstsize));
d9c5a8b9
MS
3505 }
3506 else if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3507 warned = (func
3508 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3509 "%K%qD writing %E byte into a region "
3510 "of size %E overflows the destination",
3511 "%K%qD writing %E bytes into a region "
3512 "of size %E overflows the destination",
3513 exp, func, range[0], dstsize)
3514 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3515 "%Kwriting %E byte into a region "
3516 "of size %E overflows the destination",
3517 "%Kwriting %E bytes into a region "
3518 "of size %E overflows the destination",
3519 exp, range[0], dstsize));
d9c5a8b9
MS
3520 else if (tree_int_cst_sign_bit (range[1]))
3521 {
3522 /* Avoid printing the upper bound if it's invalid. */
54aa6b58
MS
3523 warned = (func
3524 ? warning_at (loc, opt,
3525 "%K%qD writing %E or more bytes into "
3526 "a region of size %E overflows "
3527 "the destination",
3528 exp, func, range[0], dstsize)
3529 : warning_at (loc, opt,
3530 "%Kwriting %E or more bytes into "
3531 "a region of size %E overflows "
3532 "the destination",
3533 exp, range[0], dstsize));
d9c5a8b9 3534 }
ee92e7ba 3535 else
54aa6b58
MS
3536 warned = (func
3537 ? warning_at (loc, opt,
3538 "%K%qD writing between %E and %E bytes "
3539 "into a region of size %E overflows "
3540 "the destination",
3541 exp, func, range[0], range[1],
3542 dstsize)
3543 : warning_at (loc, opt,
3544 "%Kwriting between %E and %E bytes "
3545 "into a region of size %E overflows "
3546 "the destination",
3547 exp, range[0], range[1],
3548 dstsize));
3549 if (warned)
3550 TREE_NO_WARNING (exp) = true;
ee92e7ba
MS
3551
3552 /* Return error when an overflow has been detected. */
3553 return false;
3554 }
3555 }
3556
3557 /* Check the maximum length of the source sequence against the size
3558 of the destination object if known, or against the maximum size
3559 of an object. */
cc8bea0a 3560 if (maxread)
ee92e7ba 3561 {
cc8bea0a 3562 get_size_range (maxread, range);
cc8bea0a 3563 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
ee92e7ba
MS
3564 {
3565 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3566 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba
MS
3567
3568 if (tree_int_cst_lt (maxobjsize, range[0]))
3569 {
e0676e2e
MS
3570 if (TREE_NO_WARNING (exp))
3571 return false;
3572
54aa6b58
MS
3573 bool warned = false;
3574
ee92e7ba
MS
3575 /* Warn about crazy big sizes first since that's more
3576 likely to be meaningful than saying that the bound
3577 is greater than the object size if both are big. */
3578 if (range[0] == range[1])
54aa6b58
MS
3579 warned = (func
3580 ? warning_at (loc, opt,
3581 "%K%qD specified bound %E "
3582 "exceeds maximum object size %E",
3583 exp, func, range[0], maxobjsize)
3584 : warning_at (loc, opt,
3585 "%Kspecified bound %E "
3586 "exceeds maximum object size %E",
3587 exp, range[0], maxobjsize));
ee92e7ba 3588 else
54aa6b58
MS
3589 warned = (func
3590 ? warning_at (loc, opt,
3591 "%K%qD specified bound between "
3592 "%E and %E exceeds maximum object "
3593 "size %E",
3594 exp, func,
3595 range[0], range[1], maxobjsize)
3596 : warning_at (loc, opt,
3597 "%Kspecified bound between "
3598 "%E and %E exceeds maximum object "
3599 "size %E",
3600 exp, range[0], range[1], maxobjsize));
3601 if (warned)
3602 TREE_NO_WARNING (exp) = true;
ee92e7ba
MS
3603
3604 return false;
3605 }
3606
cc8bea0a 3607 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
ee92e7ba 3608 {
e0676e2e
MS
3609 if (TREE_NO_WARNING (exp))
3610 return false;
3611
54aa6b58
MS
3612 bool warned = false;
3613
d9c5a8b9 3614 if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3615 warned = (func
3616 ? warning_at (loc, opt,
3617 "%K%qD specified bound %E "
3618 "exceeds destination size %E",
3619 exp, func,
3620 range[0], dstsize)
3621 : warning_at (loc, opt,
3622 "%Kspecified bound %E "
3623 "exceeds destination size %E",
3624 exp, range[0], dstsize));
ee92e7ba 3625 else
54aa6b58
MS
3626 warned = (func
3627 ? warning_at (loc, opt,
3628 "%K%qD specified bound between %E "
3629 "and %E exceeds destination size %E",
3630 exp, func,
3631 range[0], range[1], dstsize)
3632 : warning_at (loc, opt,
3633 "%Kspecified bound between %E "
3634 "and %E exceeds destination size %E",
3635 exp,
3636 range[0], range[1], dstsize));
3637 if (warned)
3638 TREE_NO_WARNING (exp) = true;
3639
ee92e7ba
MS
3640 return false;
3641 }
3642 }
3643 }
3644
cc8bea0a 3645 /* Check for reading past the end of SRC. */
d9c5a8b9 3646 if (slen
cc8bea0a
MS
3647 && slen == srcstr
3648 && dstwrite && range[0]
d9c5a8b9
MS
3649 && tree_int_cst_lt (slen, range[0]))
3650 {
e0676e2e
MS
3651 if (TREE_NO_WARNING (exp))
3652 return false;
3653
54aa6b58 3654 bool warned = false;
d9c5a8b9 3655 location_t loc = tree_nonartificial_location (exp);
54aa6b58 3656 loc = expansion_point_location_if_in_system_header (loc);
d9c5a8b9
MS
3657
3658 if (tree_int_cst_equal (range[0], range[1]))
54aa6b58
MS
3659 warned = (func
3660 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3661 "%K%qD reading %E byte from a region of size %E",
3662 "%K%qD reading %E bytes from a region of size %E",
3663 exp, func, range[0], slen)
3664 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3665 "%Kreading %E byte from a region of size %E",
3666 "%Kreading %E bytes from a region of size %E",
3667 exp, range[0], slen));
d9c5a8b9
MS
3668 else if (tree_int_cst_sign_bit (range[1]))
3669 {
3670 /* Avoid printing the upper bound if it's invalid. */
54aa6b58
MS
3671 warned = (func
3672 ? warning_at (loc, opt,
3673 "%K%qD reading %E or more bytes from a region "
3674 "of size %E",
3675 exp, func, range[0], slen)
3676 : warning_at (loc, opt,
3677 "%Kreading %E or more bytes from a region "
3678 "of size %E",
3679 exp, range[0], slen));
d9c5a8b9
MS
3680 }
3681 else
54aa6b58
MS
3682 warned = (func
3683 ? warning_at (loc, opt,
3684 "%K%qD reading between %E and %E bytes from "
3685 "a region of size %E",
3686 exp, func, range[0], range[1], slen)
3687 : warning_at (loc, opt,
3688 "%Kreading between %E and %E bytes from "
3689 "a region of size %E",
3690 exp, range[0], range[1], slen));
3691 if (warned)
3692 TREE_NO_WARNING (exp) = true;
3693
d9c5a8b9
MS
3694 return false;
3695 }
3696
ee92e7ba
MS
3697 return true;
3698}
3699
ef29b12c
MS
3700/* If STMT is a call to an allocation function, returns the constant
3701 size of the object allocated by the call represented as sizetype.
3702 If nonnull, sets RNG1[] to the range of the size. */
268209f3
MS
3703
3704tree
ef29b12c
MS
3705gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3706 const vr_values *rvals /* = NULL */)
268209f3
MS
3707{
3708 if (!stmt)
3709 return NULL_TREE;
3710
3711 tree allocfntype;
3712 if (tree fndecl = gimple_call_fndecl (stmt))
3713 allocfntype = TREE_TYPE (fndecl);
3714 else
3715 allocfntype = gimple_call_fntype (stmt);
3716
3717 if (!allocfntype)
3718 return NULL_TREE;
3719
3720 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3721 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3722 if (!at)
3723 {
3724 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3725 return NULL_TREE;
3726
3727 argidx1 = 0;
3728 }
3729
3730 unsigned nargs = gimple_call_num_args (stmt);
3731
3732 if (argidx1 == UINT_MAX)
3733 {
3734 tree atval = TREE_VALUE (at);
3735 if (!atval)
3736 return NULL_TREE;
3737
3738 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3739 if (nargs <= argidx1)
3740 return NULL_TREE;
3741
3742 atval = TREE_CHAIN (atval);
3743 if (atval)
3744 {
3745 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3746 if (nargs <= argidx2)
3747 return NULL_TREE;
3748 }
3749 }
3750
3751 tree size = gimple_call_arg (stmt, argidx1);
3752
ef29b12c
MS
3753 wide_int rng1_buf[2];
3754 /* If RNG1 is not set, use the buffer. */
3755 if (!rng1)
3756 rng1 = rng1_buf;
3757
3758 if (!get_range (size, rng1, rvals))
268209f3
MS
3759 return NULL_TREE;
3760
3761 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
a6ae300f 3762 return fold_convert (sizetype, size);
268209f3
MS
3763
3764 /* To handle ranges do the math in wide_int and return the product
3765 of the upper bounds as a constant. Ignore anti-ranges. */
3766 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3767 wide_int rng2[2];
ef29b12c 3768 if (!get_range (n, rng2, rvals))
268209f3
MS
3769 return NULL_TREE;
3770
ef29b12c 3771 /* Extend to the maximum precision to avoid overflow. */
268209f3
MS
3772 const int prec = ADDR_MAX_PRECISION;
3773 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3774 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3775 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3776 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3777
ef29b12c
MS
3778 /* Compute products of both bounds for the caller but return the lesser
3779 of SIZE_MAX and the product of the upper bounds as a constant. */
268209f3
MS
3780 rng1[0] = rng1[0] * rng2[0];
3781 rng1[1] = rng1[1] * rng2[1];
3782 tree size_max = TYPE_MAX_VALUE (sizetype);
3783 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3784 {
3785 rng1[1] = wi::to_wide (size_max);
3786 return size_max;
3787 }
3788
3789 return wide_int_to_tree (sizetype, rng1[1]);
3790}
3791
ef29b12c
MS
3792/* Helper for compute_objsize. Returns the constant size of the DEST
3793 if it refers to a variable or field and sets *PDECL to the DECL and
3794 *POFF to zero. Otherwise returns null for other nodes. */
3795
3796static tree
3797addr_decl_size (tree dest, tree *pdecl, tree *poff)
3798{
3799 if (TREE_CODE (dest) == ADDR_EXPR)
3800 dest = TREE_OPERAND (dest, 0);
3801
3802 if (DECL_P (dest))
3803 {
3804 *pdecl = dest;
3805 *poff = integer_zero_node;
3806 if (tree size = DECL_SIZE_UNIT (dest))
3807 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3808 }
3809
3810 if (TREE_CODE (dest) == COMPONENT_REF)
3811 {
3812 *pdecl = TREE_OPERAND (dest, 1);
3813 *poff = integer_zero_node;
3814 /* Only return constant sizes for now while callers depend on it. */
3815 if (tree size = component_ref_size (dest))
3816 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3817 }
3818
3819 return NULL_TREE;
3820}
3821
ee92e7ba 3822/* Helper to compute the size of the object referenced by the DEST
025d57f0 3823 expression which must have pointer type, using Object Size type
ef29b12c
MS
3824 OSTYPE (only the least significant 2 bits are used).
3825 Returns an estimate of the size of the object represented as
3826 a sizetype constant if successful or NULL when the size cannot
3827 be determined.
3828 When the referenced object involves a non-constant offset in some
3829 range the returned value represents the largest size given the
3830 smallest non-negative offset in the range.
3831 If nonnull, sets *PDECL to the decl of the referenced subobject
3832 if it can be determined, or to null otherwise. Likewise, when
3833 POFF is nonnull *POFF is set to the offset into *PDECL.
3834
464969eb
MS
3835 The function is intended for diagnostics and should not be used
3836 to influence code generation or optimization. */
ee92e7ba 3837
025d57f0 3838tree
268209f3 3839compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
ef29b12c 3840 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
ee92e7ba 3841{
268209f3 3842 tree dummy_decl = NULL_TREE;
464969eb 3843 if (!pdecl)
268209f3
MS
3844 pdecl = &dummy_decl;
3845
ef29b12c 3846 tree dummy_off = NULL_TREE;
268209f3
MS
3847 if (!poff)
3848 poff = &dummy_off;
464969eb 3849
025d57f0
MS
3850 /* Only the two least significant bits are meaningful. */
3851 ostype &= 3;
3852
ef29b12c
MS
3853 if (ostype)
3854 /* Except for overly permissive calls to memcpy and other raw
3855 memory functions with zero OSTYPE, detect the size from simple
3856 DECLs first to more reliably than compute_builtin_object_size
3857 set *PDECL and *POFF. */
3858 if (tree size = addr_decl_size (dest, pdecl, poff))
3859 return size;
3860
3861 unsigned HOST_WIDE_INT size;
f7d86b5c 3862 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
ee92e7ba
MS
3863 return build_int_cst (sizetype, size);
3864
025d57f0
MS
3865 if (TREE_CODE (dest) == SSA_NAME)
3866 {
3867 gimple *stmt = SSA_NAME_DEF_STMT (dest);
268209f3
MS
3868 if (is_gimple_call (stmt))
3869 {
3870 /* If STMT is a call to an allocation function get the size
ef29b12c
MS
3871 from its argument(s). If successful, also set *PDECL to
3872 DEST for the caller to include in diagnostics. */
3873 if (tree size = gimple_call_alloc_size (stmt))
3874 {
3875 *pdecl = dest;
3876 *poff = integer_zero_node;
3877 return size;
3878 }
3879 return NULL_TREE;
268209f3
MS
3880 }
3881
025d57f0
MS
3882 if (!is_gimple_assign (stmt))
3883 return NULL_TREE;
3884
af3fa359
MS
3885 dest = gimple_assign_rhs1 (stmt);
3886
025d57f0 3887 tree_code code = gimple_assign_rhs_code (stmt);
af3fa359
MS
3888 if (code == POINTER_PLUS_EXPR)
3889 {
3890 /* compute_builtin_object_size fails for addresses with
3891 non-constant offsets. Try to determine the range of
e3329a78 3892 such an offset here and use it to adjust the constant
af3fa359
MS
3893 size. */
3894 tree off = gimple_assign_rhs2 (stmt);
e3329a78
MS
3895 if (TREE_CODE (off) == INTEGER_CST)
3896 {
268209f3 3897 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
e3329a78
MS
3898 {
3899 wide_int wioff = wi::to_wide (off);
3900 wide_int wisiz = wi::to_wide (size);
3901
3902 /* Ignore negative offsets for now. For others,
3903 use the lower bound as the most optimistic
3904 estimate of the (remaining) size. */
ef29b12c 3905 if (wi::neg_p (wioff))
e3329a78 3906 ;
e3329a78 3907 else
268209f3 3908 {
ef29b12c
MS
3909 if (*poff)
3910 {
3911 *poff = fold_convert (ptrdiff_type_node, *poff);
3912 off = fold_convert (ptrdiff_type_node, *poff);
3913 *poff = size_binop (PLUS_EXPR, *poff, off);
3914 }
3915 else
3916 *poff = off;
3917 if (wi::ltu_p (wioff, wisiz))
3918 return wide_int_to_tree (TREE_TYPE (size),
3919 wi::sub (wisiz, wioff));
268209f3
MS
3920 return size_zero_node;
3921 }
e3329a78
MS
3922 }
3923 }
3924 else if (TREE_CODE (off) == SSA_NAME
f05b3724 3925 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
af3fa359
MS
3926 {
3927 wide_int min, max;
54994253 3928 enum value_range_kind rng = get_range_info (off, &min, &max);
af3fa359
MS
3929
3930 if (rng == VR_RANGE)
ef29b12c
MS
3931 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3932 {
3933 wide_int wisiz = wi::to_wide (size);
3934
3935 /* Ignore negative offsets for now. For others,
3936 use the lower bound as the most optimistic
3937 estimate of the (remaining)size. */
3938 if (wi::neg_p (min) || wi::neg_p (max))
3939 ;
3940 else
3941 {
3942 /* FIXME: For now, since the offset is non-constant,
3943 clear *POFF to keep it from being "misused."
3944 Eventually *POFF will need to become a range that
3945 can be properly added to the outer offset if it
3946 too is one. */
3947 *poff = NULL_TREE;
3948 if (wi::ltu_p (min, wisiz))
268209f3
MS
3949 return wide_int_to_tree (TREE_TYPE (size),
3950 wi::sub (wisiz, min));
ef29b12c
MS
3951 return size_zero_node;
3952 }
3953 }
af3fa359
MS
3954 }
3955 }
3956 else if (code != ADDR_EXPR)
025d57f0 3957 return NULL_TREE;
025d57f0
MS
3958 }
3959
af3fa359
MS
3960 /* Unless computing the largest size (for memcpy and other raw memory
3961 functions), try to determine the size of the object from its type. */
3962 if (!ostype)
3963 return NULL_TREE;
3964
464969eb
MS
3965 if (TREE_CODE (dest) == ARRAY_REF
3966 || TREE_CODE (dest) == MEM_REF)
b631bdb3
MS
3967 {
3968 tree ref = TREE_OPERAND (dest, 0);
3969 tree off = TREE_OPERAND (dest, 1);
268209f3 3970 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
b631bdb3 3971 {
464969eb
MS
3972 /* If the declaration of the destination object is known
3973 to have zero size, return zero. */
268209f3
MS
3974 if (integer_zerop (size)
3975 && *pdecl && DECL_P (*pdecl)
3976 && *poff && integer_zerop (*poff))
f7d86b5c 3977 return size_zero_node;
464969eb 3978
ef29b12c
MS
3979 /* A valid offset into a declared object cannot be negative.
3980 A zero size with a zero "inner" offset is still zero size
3981 regardless of the "other" offset OFF. */
3982 if (*poff
3983 && ((integer_zerop (*poff) && integer_zerop (size))
3984 || (TREE_CODE (*poff) == INTEGER_CST
3985 && tree_int_cst_sgn (*poff) < 0)))
268209f3 3986 return size_zero_node;
464969eb 3987
ef29b12c
MS
3988 wide_int offrng[2];
3989 if (!get_range (off, offrng, rvals))
3990 return NULL_TREE;
3991
3992 /* Convert to the same precision to keep wide_int from "helpfully"
3993 crashing whenever it sees other arguments. */
3994 const unsigned sizprec = TYPE_PRECISION (sizetype);
3995 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
3996 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
3997
268209f3
MS
3998 /* Adjust SIZE either up or down by the sum of *POFF and OFF
3999 above. */
464969eb
MS
4000 if (TREE_CODE (dest) == ARRAY_REF)
4001 {
12603635
MS
4002 tree lowbnd = array_ref_low_bound (dest);
4003 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4004 {
4005 /* Adjust the offset by the low bound of the array
4006 domain (normally zero but 1 in Fortran). */
4007 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4008 offrng[0] -= lb;
4009 offrng[1] -= lb;
4010 }
4011
268209f3 4012 /* Convert the array index into a byte offset. */
464969eb 4013 tree eltype = TREE_TYPE (dest);
f05b3724
JJ
4014 tree tpsize = TYPE_SIZE_UNIT (eltype);
4015 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
ef29b12c
MS
4016 {
4017 wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4018 offrng[0] *= wsz;
4019 offrng[1] *= wsz;
4020 }
464969eb
MS
4021 else
4022 return NULL_TREE;
4023 }
4024
ef29b12c
MS
4025 wide_int wisize = wi::to_wide (size);
4026
4027 if (!*poff)
268209f3 4028 {
ef29b12c
MS
4029 /* If the "inner" offset is unknown and the "outer" offset
4030 is either negative or less than SIZE, return the size
4031 minus the offset. This may be overly optimistic in
4032 the first case if the inner offset happens to be less
4033 than the absolute value of the outer offset. */
4034 if (wi::neg_p (offrng[0]))
4035 return size;
4036 if (wi::ltu_p (offrng[0], wisize))
4037 return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4038 return size_zero_node;
268209f3 4039 }
268209f3
MS
4040
4041 /* Convert to the same precision to keep wide_int from "helpfuly"
4042 crashing whenever it sees other argumments. */
ef29b12c
MS
4043 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4044 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
268209f3
MS
4045
4046 tree dstoff = *poff;
4047 if (integer_zerop (*poff))
4048 *poff = off;
4049 else if (!integer_zerop (off))
4050 {
4051 *poff = fold_convert (ptrdiff_type_node, *poff);
4052 off = fold_convert (ptrdiff_type_node, off);
4053 *poff = size_binop (PLUS_EXPR, *poff, off);
4054 }
4055
ef29b12c 4056 if (!wi::neg_p (offrng[0]))
268209f3
MS
4057 {
4058 if (TREE_CODE (size) != INTEGER_CST)
4059 return NULL_TREE;
4060
4061 /* Return the difference between the size and the offset
4062 or zero if the offset is greater. */
ef29b12c 4063 wide_int wisize = wi::to_wide (size, sizprec);
268209f3
MS
4064 if (wi::ltu_p (wisize, offrng[0]))
4065 return size_zero_node;
4066
4067 return wide_int_to_tree (sizetype, wisize - offrng[0]);
4068 }
4069
4070 wide_int dstoffrng[2];
4071 if (TREE_CODE (dstoff) == INTEGER_CST)
4072 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4073 else if (TREE_CODE (dstoff) == SSA_NAME)
4074 {
4075 enum value_range_kind rng
4076 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4077 if (rng != VR_RANGE)
4078 return NULL_TREE;
4079 }
4080 else
4081 return NULL_TREE;
4082
ef29b12c
MS
4083 dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4084 dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
268209f3 4085
ef29b12c
MS
4086 if (!wi::neg_p (dstoffrng[0]))
4087 wisize += dstoffrng[0];
268209f3
MS
4088
4089 offrng[1] += dstoffrng[1];
ef29b12c 4090 if (wi::neg_p (offrng[1]))
268209f3
MS
4091 return size_zero_node;
4092
ef29b12c 4093 return wide_int_to_tree (sizetype, wisize);
b631bdb3
MS
4094 }
4095
4096 return NULL_TREE;
4097 }
4098
ef29b12c
MS
4099 /* Try simple DECLs not handled above. */
4100 if (tree size = addr_decl_size (dest, pdecl, poff))
4101 return size;
464969eb 4102
025d57f0
MS
4103 tree type = TREE_TYPE (dest);
4104 if (TREE_CODE (type) == POINTER_TYPE)
4105 type = TREE_TYPE (type);
4106
4107 type = TYPE_MAIN_VARIANT (type);
268209f3
MS
4108 if (TREE_CODE (dest) == ADDR_EXPR)
4109 dest = TREE_OPERAND (dest, 0);
025d57f0
MS
4110
4111 if (TREE_CODE (type) == ARRAY_TYPE
268209f3 4112 && !array_at_struct_end_p (dest))
464969eb
MS
4113 {
4114 if (tree size = TYPE_SIZE_UNIT (type))
4115 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
025d57f0
MS
4116 }
4117
ee92e7ba
MS
4118 return NULL_TREE;
4119}
4120
4121/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
4122 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
4123 call expression, DEST is the destination argument, SRC is the source
4124 argument or null, and LEN is the number of bytes. Use Object Size type-0
4125 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
4126 (no overflow or invalid sizes), false otherwise. */
4127
4128static bool
cc8bea0a 4129check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 4130{
ee92e7ba 4131 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
4132 try to determine the size of the largest source and destination
4133 object using type-0 Object Size regardless of the object size
4134 type specified by the option. */
4135 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4136 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 4137
cc8bea0a
MS
4138 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4139 srcsize, dstsize);
d9c5a8b9
MS
4140}
4141
4142/* Validate memchr arguments without performing any expansion.
4143 Return NULL_RTX. */
4144
4145static rtx
4146expand_builtin_memchr (tree exp, rtx)
4147{
4148 if (!validate_arglist (exp,
4149 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4150 return NULL_RTX;
4151
4152 tree arg1 = CALL_EXPR_ARG (exp, 0);
4153 tree len = CALL_EXPR_ARG (exp, 2);
4154
4155 /* Diagnose calls where the specified length exceeds the size
4156 of the object. */
4157 if (warn_stringop_overflow)
4158 {
4159 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
4160 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4161 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
4162 }
4163
4164 return NULL_RTX;
ee92e7ba
MS
4165}
4166
5039610b
SL
4167/* Expand a call EXP to the memcpy builtin.
4168 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 4169 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 4170 mode MODE if that's convenient). */
5039610b 4171
28f4ec01 4172static rtx
44e10129 4173expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 4174{
5039610b
SL
4175 if (!validate_arglist (exp,
4176 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4177 return NULL_RTX;
ee92e7ba
MS
4178
4179 tree dest = CALL_EXPR_ARG (exp, 0);
4180 tree src = CALL_EXPR_ARG (exp, 1);
4181 tree len = CALL_EXPR_ARG (exp, 2);
4182
cc8bea0a 4183 check_memop_access (exp, dest, src, len);
ee92e7ba 4184
671a00ee 4185 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
03a9b90a 4186 /*retmode=*/ RETURN_BEGIN, false);
edcf72f3 4187}
57814e5e 4188
e50d56a5
MS
4189/* Check a call EXP to the memmove built-in for validity.
4190 Return NULL_RTX on both success and failure. */
4191
4192static rtx
03a9b90a 4193expand_builtin_memmove (tree exp, rtx target)
e50d56a5
MS
4194{
4195 if (!validate_arglist (exp,
4196 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4197 return NULL_RTX;
4198
4199 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 4200 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
4201 tree len = CALL_EXPR_ARG (exp, 2);
4202
cc8bea0a 4203 check_memop_access (exp, dest, src, len);
e50d56a5 4204
03a9b90a
AS
4205 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4206 /*retmode=*/ RETURN_BEGIN, true);
e50d56a5
MS
4207}
4208
5039610b
SL
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_size) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_sizes.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_size always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  /* mempcpy returns DEST + LEN, hence RETURN_END.  */
  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
4247
671a00ee
ML
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansing should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  MIGHT_OVERLAP is true for memmove,
   where the source and destination regions may overlap.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  /* Profile-derived hints; -1/0 mean "unknown".  */
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Refine the alignment/size expectations from block profiling, if
     a gimple statement is currently being expanded.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done by
     pieces, we can avoid loading the string from memory and only
     stored the computed constants.  This works in the overlap
     (memmove) case as well because store_by_pieces just generates a
     series of stores of constants from the string constant returned
     by c_getstr().  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  /* Prefer a real mempcpy libcall when the target's libc has a fast
     one, the caller wants the DEST + LEN return value, and the
     regions cannot overlap.  */
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* pc_rtx signals that the move was emitted as a libcall whose own
     return value should be used; there is nothing further to do.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* Adjust the return value for mempcpy (DEST + LEN) or stpcpy
     (DEST + LEN - 1) semantics.  */
  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
4358
/* Expand a mempcpy-style copy with the arguments DEST, SRC and LEN
   broken out of a CALL_EXPR (ORIG_EXP is the original call).  Thin
   wrapper around expand_builtin_memory_copy_args with
   might_overlap == false; the return value follows RETMODE.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
4366
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Punt unless the target provides a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* The caller wants DEST back; capture its address in a register
	 before the insn consumes/modifies the MEM.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
4412
ee92e7ba
MS
4413/* Do some very basic size validation of a call to the strcpy builtin
4414 given by EXP. Return NULL_RTX to have the built-in expand to a call
4415 to the library function. */
4416
4417static rtx
b5338fb3 4418expand_builtin_strcat (tree exp)
ee92e7ba
MS
4419{
4420 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4421 || !warn_stringop_overflow)
4422 return NULL_RTX;
4423
4424 tree dest = CALL_EXPR_ARG (exp, 0);
4425 tree src = CALL_EXPR_ARG (exp, 1);
4426
b5338fb3
MS
4427 /* Detect unterminated source (only). */
4428 if (!check_nul_terminated_array (exp, src))
4429 return NULL_RTX;
4430
ee92e7ba
MS
4431 /* There is no way here to determine the length of the string in
4432 the destination to which the SRC string is being appended so
4433 just diagnose cases when the souce string is longer than
4434 the destination object. */
4435
d9c5a8b9 4436 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba 4437
cc8bea0a
MS
4438 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4439 destsize);
ee92e7ba
MS
4440
4441 return NULL_RTX;
4442}
4443
b8698a0f
L
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
4477
4478/* Helper function to do the actual work for expand_builtin_strcpy. The
4479 arguments to the builtin_strcpy call DEST and SRC are broken out
4480 so that this can also be called without constructing an actual CALL_EXPR.
4481 The other arguments and return value are the same as for
4482 expand_builtin_strcpy. */
4483
4484static rtx
e08341bb 4485expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5039610b 4486{
e08341bb
MS
4487 /* Detect strcpy calls with unterminated arrays.. */
4488 if (tree nonstr = unterminated_array (src))
4489 {
4490 /* NONSTR refers to the non-nul terminated constant array. */
4491 if (!TREE_NO_WARNING (exp))
4492 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4493 return NULL_RTX;
4494 }
4495
2ff5ffb6 4496 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
28f4ec01
BS
4497}
4498
5039610b
SL
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src, NULL)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* Warn when the source is a constant array with no terminating
	 nul (LENDATA.DECL identifies the offending array).  */
      if (lendata.decl && !TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);

      /* Copy LEN + 1 bytes (including the nul) via mempcpy and have it
	 return the address of the last byte written (DEST + LEN).  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      /* Fall back to strcpy-style expansion and compute the return
	 value as DEST + LEN by hand when LEN is a constant.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
4589
3ce4cdb2
MS
4590/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4591 arguments while being careful to avoid duplicate warnings (which could
4592 be issued if the expander were to expand the call, resulting in it
4593 being emitted in expand_call(). */
4594
4595static rtx
4596expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4597{
4598 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4599 {
4600 /* The call has been successfully expanded. Check for nonstring
4601 arguments and issue warnings as appropriate. */
4602 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4603 return ret;
4604 }
4605
4606 return NULL_RTX;
4607}
4608
e50d56a5
MS
4609/* Check a call EXP to the stpncpy built-in for validity.
4610 Return NULL_RTX on both success and failure. */
4611
4612static rtx
4613expand_builtin_stpncpy (tree exp, rtx)
4614{
4615 if (!validate_arglist (exp,
4616 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4617 || !warn_stringop_overflow)
4618 return NULL_RTX;
4619
c6c02519 4620 /* The source and destination of the call. */
e50d56a5
MS
4621 tree dest = CALL_EXPR_ARG (exp, 0);
4622 tree src = CALL_EXPR_ARG (exp, 1);
4623
c6c02519 4624 /* The exact number of bytes to write (not the maximum). */
e50d56a5 4625 tree len = CALL_EXPR_ARG (exp, 2);
b5338fb3
MS
4626 if (!check_nul_terminated_array (exp, src, len))
4627 return NULL_RTX;
e50d56a5 4628
c6c02519 4629 /* The size of the destination object. */
d9c5a8b9 4630 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e50d56a5 4631
cc8bea0a 4632 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
e50d56a5
MS
4633
4634 return NULL_RTX;
4635}
4636
57814e5e
JJ
4637/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4638 bytes from constant string DATA + OFFSET and return it as target
4639 constant. */
4640
14a43348 4641rtx
4682ae04 4642builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 4643 scalar_int_mode mode)
57814e5e
JJ
4644{
4645 const char *str = (const char *) data;
4646
4647 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4648 return const0_rtx;
4649
4650 return c_readstr (str + offset, mode);
4651}
4652
ee92e7ba
MS
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  OBJSIZE is
   the destination size supplied by __strncat_chk, or NULL_TREE.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      /* Point the diagnostic at the user-visible location of the call.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or MAXREAD is a known bound
     smaller than it, MAXREAD limits the number of bytes read.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4713
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Detect unterminated source (only).  */
  if (!check_nul_terminated_array (exp, src, maxread))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below.  */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      /* Point the diagnostic at the user-visible location of the call.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* When the source length is unknown, or MAXREAD is a known bound
     smaller than it, MAXREAD limits the number of bytes read.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4787
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose an unterminated source array.  */
  if (!check_nul_terminated_array (exp, src, len))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest,
				       warn_stringop_overflow - 1);

      /* The number of bytes to write is LEN but check_access will also
	 check SLEN if LEN's value isn't known.  */
      check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
		    destsize);
    }

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  /* SLEN becomes strlen (SRC) + 1, the number of bytes that include
     the terminating nul.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
4855
ab937357
JJ
4856/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4857 bytes from constant string DATA + OFFSET and return it as target
4858 constant. */
4859
34d85166 4860rtx
4682ae04 4861builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4862 scalar_int_mode mode)
ab937357
JJ
4863{
4864 const char *c = (const char *) data;
f883e0a7 4865 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
4866
4867 memset (p, *c, GET_MODE_SIZE (mode));
4868
4869 return c_readstr (p, mode);
4870}
4871
1a887f86
RS
4872/* Callback routine for store_by_pieces. Return the RTL of a register
4873 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4874 char value given in the RTL register data. For example, if mode is
4875 4 bytes wide, return the RTL for 0x01010101*data. */
4876
4877static rtx
4682ae04 4878builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4879 scalar_int_mode mode)
1a887f86
RS
4880{
4881 rtx target, coeff;
4882 size_t size;
4883 char *p;
4884
4885 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
4886 if (size == 1)
4887 return (rtx) data;
1a887f86 4888
f883e0a7 4889 p = XALLOCAVEC (char, size);
1a887f86
RS
4890 memset (p, 1, size);
4891 coeff = c_readstr (p, mode);
4892
5ab2f7b7 4893 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
4894 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4895 return force_reg (mode, target);
4896}
4897
b8698a0f
L
4898/* Expand expression EXP, which is a call to the memset builtin. Return
4899 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 4900 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 4901 convenient). */
fed3cef0 4902
28f4ec01 4903static rtx
ef4bddc2 4904expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 4905{
5039610b
SL
4906 if (!validate_arglist (exp,
4907 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4908 return NULL_RTX;
ee92e7ba
MS
4909
4910 tree dest = CALL_EXPR_ARG (exp, 0);
4911 tree val = CALL_EXPR_ARG (exp, 1);
4912 tree len = CALL_EXPR_ARG (exp, 2);
4913
cc8bea0a 4914 check_memop_access (exp, dest, NULL_TREE, len);
ee92e7ba
MS
4915
4916 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5039610b 4917}
28f4ec01 4918
5039610b
SL
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  /* Profile-derived hints; -1/0 mean "unknown".  */
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: replicate it at run time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Constant nonzero fill byte.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Fill byte is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: rebuild and emit the original memset or
     bzero call so it goes through the normal libcall path.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
5060
b8698a0f 5061/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 5062 NULL_RTX if we failed the caller should emit a normal call. */
5197bd50 5063
e3a709be 5064static rtx
8148fe65 5065expand_builtin_bzero (tree exp)
e3a709be 5066{
5039610b 5067 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 5068 return NULL_RTX;
e3a709be 5069
ee92e7ba
MS
5070 tree dest = CALL_EXPR_ARG (exp, 0);
5071 tree size = CALL_EXPR_ARG (exp, 1);
5072
cc8bea0a 5073 check_memop_access (exp, dest, NULL_TREE, size);
8d51ecf8 5074
3477addf 5075 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
5076 memset(ptr x, int 0, size_t y). This is done this way
5077 so that if it isn't expanded inline, we fallback to
5078 calling bzero instead of memset. */
8d51ecf8 5079
ee92e7ba
MS
5080 location_t loc = EXPR_LOCATION (exp);
5081
5039610b 5082 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
5083 fold_convert_loc (loc,
5084 size_type_node, size),
5039610b 5085 const0_rtx, VOIDmode, exp);
e3a709be
KG
5086}
5087
a666df60
RS
5088/* Try to expand cmpstr operation ICODE with the given operands.
5089 Return the result rtx on success, otherwise return null. */
5090
5091static rtx
5092expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5093 HOST_WIDE_INT align)
5094{
5095 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5096
5097 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5098 target = NULL_RTX;
5099
99b1c316 5100 class expand_operand ops[4];
a666df60
RS
5101 create_output_operand (&ops[0], target, insn_mode);
5102 create_fixed_operand (&ops[1], arg1_rtx);
5103 create_fixed_operand (&ops[2], arg2_rtx);
5104 create_integer_operand (&ops[3], align);
5105 if (maybe_expand_insn (icode, 4, ops))
5106 return ops[0].value;
5107 return NULL_RTX;
5108}
5109
2be3b5ce 5110/* Expand expression EXP, which is a call to the memcmp built-in function.
9b0f6f5e 5111 Return NULL_RTX if we failed and the caller should emit a normal call,
36b85e43
BS
5112 otherwise try to get the result in TARGET, if convenient.
5113 RESULT_EQ is true if we can relax the returned value to be either zero
5114 or nonzero, without caring about the sign. */
5197bd50 5115
28f4ec01 5116static rtx
36b85e43 5117expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
28f4ec01 5118{
5039610b
SL
5119 if (!validate_arglist (exp,
5120 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5121 return NULL_RTX;
c2bd38e8 5122
7f9f48be
RS
5123 tree arg1 = CALL_EXPR_ARG (exp, 0);
5124 tree arg2 = CALL_EXPR_ARG (exp, 1);
5125 tree len = CALL_EXPR_ARG (exp, 2);
b2272b13
QZ
5126 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5127 bool no_overflow = true;
d9c5a8b9
MS
5128
5129 /* Diagnose calls where the specified length exceeds the size of either
5130 object. */
b2272b13
QZ
5131 tree size = compute_objsize (arg1, 0);
5132 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5133 len, /*maxread=*/NULL_TREE, size,
5134 /*objsize=*/NULL_TREE);
10a0e2a9 5135 if (no_overflow)
b2272b13
QZ
5136 {
5137 size = compute_objsize (arg2, 0);
5138 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5139 len, /*maxread=*/NULL_TREE, size,
5140 /*objsize=*/NULL_TREE);
10a0e2a9 5141 }
b2272b13 5142
b99d7d97
QZ
5143 /* If the specified length exceeds the size of either object,
5144 call the function. */
5145 if (!no_overflow)
5146 return NULL_RTX;
5147
10a0e2a9 5148 /* Due to the performance benefit, always inline the calls first
b2272b13
QZ
5149 when result_eq is false. */
5150 rtx result = NULL_RTX;
10a0e2a9 5151
b99d7d97 5152 if (!result_eq && fcode != BUILT_IN_BCMP)
d9c5a8b9 5153 {
523a59ff 5154 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5155 if (result)
5156 return result;
d9c5a8b9
MS
5157 }
5158
36b85e43
BS
5159 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5160 location_t loc = EXPR_LOCATION (exp);
358b8f01 5161
7f9f48be
RS
5162 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5163 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
28f4ec01 5164
7f9f48be
RS
5165 /* If we don't have POINTER_TYPE, call the function. */
5166 if (arg1_align == 0 || arg2_align == 0)
5167 return NULL_RTX;
28f4ec01 5168
7f9f48be
RS
5169 rtx arg1_rtx = get_memory_rtx (arg1, len);
5170 rtx arg2_rtx = get_memory_rtx (arg2, len);
36b85e43 5171 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
28f4ec01 5172
7f9f48be 5173 /* Set MEM_SIZE as appropriate. */
36b85e43 5174 if (CONST_INT_P (len_rtx))
7f9f48be 5175 {
36b85e43
BS
5176 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5177 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7f9f48be 5178 }
6cbaec9e 5179
36b85e43
BS
5180 by_pieces_constfn constfn = NULL;
5181
d0d7f887
BS
5182 const char *src_str = c_getstr (arg2);
5183 if (result_eq && src_str == NULL)
5184 {
5185 src_str = c_getstr (arg1);
5186 if (src_str != NULL)
4f353581 5187 std::swap (arg1_rtx, arg2_rtx);
d0d7f887 5188 }
36b85e43
BS
5189
5190 /* If SRC is a string constant and block move would be done
5191 by pieces, we can avoid loading the string from memory
5192 and only stored the computed constants. */
5193 if (src_str
5194 && CONST_INT_P (len_rtx)
5195 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
5196 constfn = builtin_memcpy_read_str;
5197
b2272b13
QZ
5198 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5199 TREE_TYPE (len), target,
5200 result_eq, constfn,
5201 CONST_CAST (char *, src_str));
36b85e43 5202
7f9f48be
RS
5203 if (result)
5204 {
5205 /* Return the value in the proper mode for this function. */
5206 if (GET_MODE (result) == mode)
5207 return result;
6cbaec9e 5208
7f9f48be
RS
5209 if (target != 0)
5210 {
5211 convert_move (target, result, 0);
5212 return target;
5213 }
8878e913 5214
28f4ec01 5215 return convert_to_mode (mode, result, 0);
7f9f48be 5216 }
28f4ec01 5217
ee516de9 5218 return NULL_RTX;
c2bd38e8
RS
5219}
5220
5039610b 5221/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
28f4ec01
BS
5222 if we failed the caller should emit a normal call, otherwise try to get
5223 the result in TARGET, if convenient. */
fed3cef0 5224
28f4ec01 5225static rtx
44e10129 5226expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
28f4ec01 5227{
5039610b
SL
5228 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5229 return NULL_RTX;
8d51ecf8 5230
b5338fb3
MS
5231 tree arg1 = CALL_EXPR_ARG (exp, 0);
5232 tree arg2 = CALL_EXPR_ARG (exp, 1);
5233
5234 if (!check_nul_terminated_array (exp, arg1)
5235 || !check_nul_terminated_array (exp, arg2))
5236 return NULL_RTX;
5237
b2272b13
QZ
5238 /* Due to the performance benefit, always inline the calls first. */
5239 rtx result = NULL_RTX;
523a59ff 5240 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5241 if (result)
5242 return result;
5243
a666df60
RS
5244 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5245 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
5246 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5247 return NULL_RTX;
c22cacf3 5248
16155777
MS
5249 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5250 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
2be3b5ce 5251
16155777
MS
5252 /* If we don't have POINTER_TYPE, call the function. */
5253 if (arg1_align == 0 || arg2_align == 0)
5254 return NULL_RTX;
2be3b5ce 5255
16155777
MS
5256 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5257 arg1 = builtin_save_expr (arg1);
5258 arg2 = builtin_save_expr (arg2);
28f4ec01 5259
16155777
MS
5260 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5261 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
a666df60 5262
16155777
MS
5263 /* Try to call cmpstrsi. */
5264 if (cmpstr_icode != CODE_FOR_nothing)
5265 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5266 MIN (arg1_align, arg2_align));
40c1d5f8 5267
16155777
MS
5268 /* Try to determine at least one length and call cmpstrnsi. */
5269 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5270 {
5271 tree len;
5272 rtx arg3_rtx;
5273
5274 tree len1 = c_strlen (arg1, 1);
5275 tree len2 = c_strlen (arg2, 1);
5276
5277 if (len1)
5278 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5279 if (len2)
5280 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5281
5282 /* If we don't have a constant length for the first, use the length
5283 of the second, if we know it. We don't require a constant for
5284 this case; some cost analysis could be done if both are available
5285 but neither is constant. For now, assume they're equally cheap,
5286 unless one has side effects. If both strings have constant lengths,
5287 use the smaller. */
5288
5289 if (!len1)
5290 len = len2;
5291 else if (!len2)
5292 len = len1;
5293 else if (TREE_SIDE_EFFECTS (len1))
5294 len = len2;
5295 else if (TREE_SIDE_EFFECTS (len2))
5296 len = len1;
5297 else if (TREE_CODE (len1) != INTEGER_CST)
5298 len = len2;
5299 else if (TREE_CODE (len2) != INTEGER_CST)
5300 len = len1;
5301 else if (tree_int_cst_lt (len1, len2))
5302 len = len1;
5303 else
5304 len = len2;
c43fa1f5 5305
16155777
MS
5306 /* If both arguments have side effects, we cannot optimize. */
5307 if (len && !TREE_SIDE_EFFECTS (len))
40c1d5f8 5308 {
16155777
MS
5309 arg3_rtx = expand_normal (len);
5310 result = expand_cmpstrn_or_cmpmem
5311 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5312 arg3_rtx, MIN (arg1_align, arg2_align));
40c1d5f8 5313 }
16155777
MS
5314 }
5315
16155777 5316 tree fndecl = get_callee_fndecl (exp);
16155777
MS
5317 if (result)
5318 {
36537a1c
MS
5319 /* Check to see if the argument was declared attribute nonstring
5320 and if so, issue a warning since at this point it's not known
5321 to be nul-terminated. */
5322 maybe_warn_nonstring_arg (fndecl, exp);
5323
16155777
MS
5324 /* Return the value in the proper mode for this function. */
5325 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5326 if (GET_MODE (result) == mode)
5327 return result;
5328 if (target == 0)
5329 return convert_to_mode (mode, result, 0);
5330 convert_move (target, result, 0);
5331 return target;
40c1d5f8 5332 }
16155777
MS
5333
5334 /* Expand the library call ourselves using a stabilized argument
5335 list to avoid re-evaluating the function's arguments twice. */
5336 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5337 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5338 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5339 return expand_call (fn, target, target == const0_rtx);
2dee4af1 5340}
28f4ec01 5341
b8698a0f 5342/* Expand expression EXP, which is a call to the strncmp builtin. Return
5039610b 5343 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
da9e9f08 5344 the result in TARGET, if convenient. */
5197bd50 5345
da9e9f08 5346static rtx
44e10129 5347expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
ef4bddc2 5348 ATTRIBUTE_UNUSED machine_mode mode)
da9e9f08 5349{
5039610b
SL
5350 if (!validate_arglist (exp,
5351 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5352 return NULL_RTX;
da9e9f08 5353
b5338fb3
MS
5354 tree arg1 = CALL_EXPR_ARG (exp, 0);
5355 tree arg2 = CALL_EXPR_ARG (exp, 1);
5356 tree arg3 = CALL_EXPR_ARG (exp, 2);
5357
5358 if (!check_nul_terminated_array (exp, arg1, arg3)
5359 || !check_nul_terminated_array (exp, arg2, arg3))
5360 return NULL_RTX;
5361
b2272b13
QZ
5362 /* Due to the performance benefit, always inline the calls first. */
5363 rtx result = NULL_RTX;
523a59ff 5364 result = inline_expand_builtin_string_cmp (exp, target);
b2272b13
QZ
5365 if (result)
5366 return result;
5367
819c1488 5368 /* If c_strlen can determine an expression for one of the string
40c1d5f8 5369 lengths, and it doesn't have side effects, then emit cmpstrnsi
2be3b5ce 5370 using length MIN(strlen(string)+1, arg3). */
a666df60 5371 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
5372 if (cmpstrn_icode == CODE_FOR_nothing)
5373 return NULL_RTX;
5197bd50 5374
16155777
MS
5375 tree len;
5376
16155777
MS
5377 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5378 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5379
5380 tree len1 = c_strlen (arg1, 1);
5381 tree len2 = c_strlen (arg2, 1);
5382
5383 location_t loc = EXPR_LOCATION (exp);
5384
5385 if (len1)
5386 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5387 if (len2)
5388 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5389
5390 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5391
5392 /* If we don't have a constant length for the first, use the length
5393 of the second, if we know it. If neither string is constant length,
5394 use the given length argument. We don't require a constant for
5395 this case; some cost analysis could be done if both are available
5396 but neither is constant. For now, assume they're equally cheap,
5397 unless one has side effects. If both strings have constant lengths,
5398 use the smaller. */
5399
5400 if (!len1 && !len2)
5401 len = len3;
5402 else if (!len1)
5403 len = len2;
5404 else if (!len2)
5405 len = len1;
5406 else if (TREE_SIDE_EFFECTS (len1))
5407 len = len2;
5408 else if (TREE_SIDE_EFFECTS (len2))
5409 len = len1;
5410 else if (TREE_CODE (len1) != INTEGER_CST)
5411 len = len2;
5412 else if (TREE_CODE (len2) != INTEGER_CST)
5413 len = len1;
5414 else if (tree_int_cst_lt (len1, len2))
5415 len = len1;
5416 else
5417 len = len2;
5418
5419 /* If we are not using the given length, we must incorporate it here.
5420 The actual new length parameter will be MIN(len,arg3) in this case. */
5421 if (len != len3)
75e96bc8
MS
5422 {
5423 len = fold_convert_loc (loc, sizetype, len);
5424 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5425 }
16155777
MS
5426 rtx arg1_rtx = get_memory_rtx (arg1, len);
5427 rtx arg2_rtx = get_memory_rtx (arg2, len);
5428 rtx arg3_rtx = expand_normal (len);
b2272b13
QZ
5429 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5430 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5431 MIN (arg1_align, arg2_align));
16155777 5432
16155777 5433 tree fndecl = get_callee_fndecl (exp);
16155777
MS
5434 if (result)
5435 {
36537a1c
MS
5436 /* Check to see if the argument was declared attribute nonstring
5437 and if so, issue a warning since at this point it's not known
5438 to be nul-terminated. */
5439 maybe_warn_nonstring_arg (fndecl, exp);
5440
16155777
MS
5441 /* Return the value in the proper mode for this function. */
5442 mode = TYPE_MODE (TREE_TYPE (exp));
5443 if (GET_MODE (result) == mode)
5444 return result;
5445 if (target == 0)
5446 return convert_to_mode (mode, result, 0);
5447 convert_move (target, result, 0);
5448 return target;
5449 }
5450
5451 /* Expand the library call ourselves using a stabilized argument
5452 list to avoid re-evaluating the function's arguments twice. */
5453 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5454 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5455 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5456 return expand_call (fn, target, target == const0_rtx);
d118937d
KG
5457}
5458
d3707adb
RH
5459/* Expand a call to __builtin_saveregs, generating the result in TARGET,
5460 if that's convenient. */
fed3cef0 5461
d3707adb 5462rtx
4682ae04 5463expand_builtin_saveregs (void)
28f4ec01 5464{
58f4cf2a
DM
5465 rtx val;
5466 rtx_insn *seq;
28f4ec01
BS
5467
5468 /* Don't do __builtin_saveregs more than once in a function.
5469 Save the result of the first call and reuse it. */
5470 if (saveregs_value != 0)
5471 return saveregs_value;
28f4ec01 5472
d3707adb
RH
5473 /* When this function is called, it means that registers must be
5474 saved on entry to this function. So we migrate the call to the
5475 first insn of this function. */
5476
5477 start_sequence ();
28f4ec01 5478
d3707adb 5479 /* Do whatever the machine needs done in this case. */
61f71b34 5480 val = targetm.calls.expand_builtin_saveregs ();
28f4ec01 5481
d3707adb
RH
5482 seq = get_insns ();
5483 end_sequence ();
28f4ec01 5484
d3707adb 5485 saveregs_value = val;
28f4ec01 5486
2f937369
DM
5487 /* Put the insns after the NOTE that starts the function. If this
5488 is inside a start_sequence, make the outer-level insn chain current, so
d3707adb
RH
5489 the code is placed at the start of the function. */
5490 push_topmost_sequence ();
242229bb 5491 emit_insn_after (seq, entry_of_function ());
d3707adb
RH
5492 pop_topmost_sequence ();
5493
5494 return val;
28f4ec01
BS
5495}
5496
8870e212 5497/* Expand a call to __builtin_next_arg. */
5197bd50 5498
28f4ec01 5499static rtx
8870e212 5500expand_builtin_next_arg (void)
28f4ec01 5501{
8870e212
JJ
5502 /* Checking arguments is already done in fold_builtin_next_arg
5503 that must be called before this function. */
4319e38c 5504 return expand_binop (ptr_mode, add_optab,
38173d38
JH
5505 crtl->args.internal_arg_pointer,
5506 crtl->args.arg_offset_rtx,
28f4ec01
BS
5507 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5508}
5509
d3707adb
RH
5510/* Make it easier for the backends by protecting the valist argument
5511 from multiple evaluations. */
5512
5513static tree
db3927fb 5514stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
d3707adb 5515{
35cbb299
KT
5516 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5517
70f34814
RG
5518 /* The current way of determining the type of valist is completely
5519 bogus. We should have the information on the va builtin instead. */
5520 if (!vatype)
5521 vatype = targetm.fn_abi_va_list (cfun->decl);
35cbb299
KT
5522
5523 if (TREE_CODE (vatype) == ARRAY_TYPE)
d3707adb 5524 {
9f720c3e
GK
5525 if (TREE_SIDE_EFFECTS (valist))
5526 valist = save_expr (valist);
8ebecc3b 5527
9f720c3e 5528 /* For this case, the backends will be expecting a pointer to
35cbb299
KT
5529 vatype, but it's possible we've actually been given an array
5530 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
9f720c3e
GK
5531 So fix it. */
5532 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
daf68dd7 5533 {
35cbb299 5534 tree p1 = build_pointer_type (TREE_TYPE (vatype));
db3927fb 5535 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
daf68dd7 5536 }
d3707adb 5537 }
8ebecc3b 5538 else
d3707adb 5539 {
70f34814 5540 tree pt = build_pointer_type (vatype);
8ebecc3b 5541
9f720c3e
GK
5542 if (! needs_lvalue)
5543 {
8ebecc3b
RH
5544 if (! TREE_SIDE_EFFECTS (valist))
5545 return valist;
8d51ecf8 5546
db3927fb 5547 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
d3707adb 5548 TREE_SIDE_EFFECTS (valist) = 1;
d3707adb 5549 }
9f720c3e 5550
8ebecc3b 5551 if (TREE_SIDE_EFFECTS (valist))
9f720c3e 5552 valist = save_expr (valist);
70f34814
RG
5553 valist = fold_build2_loc (loc, MEM_REF,
5554 vatype, valist, build_int_cst (pt, 0));
d3707adb
RH
5555 }
5556
5557 return valist;
5558}
5559
c35d187f
RH
5560/* The "standard" definition of va_list is void*. */
5561
5562tree
5563std_build_builtin_va_list (void)
5564{
5565 return ptr_type_node;
5566}
5567
35cbb299
KT
5568/* The "standard" abi va_list is va_list_type_node. */
5569
5570tree
5571std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5572{
5573 return va_list_type_node;
5574}
5575
5576/* The "standard" type of va_list is va_list_type_node. */
5577
5578tree
5579std_canonical_va_list_type (tree type)
5580{
5581 tree wtype, htype;
5582
35cbb299
KT
5583 wtype = va_list_type_node;
5584 htype = type;
431e31a9
TV
5585
5586 if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
5587 {
5588 /* If va_list is an array type, the argument may have decayed
5589 to a pointer type, e.g. by being passed to another function.
5590 In that case, unwrap both types so that we can compare the
5591 underlying records. */
5592 if (TREE_CODE (htype) == ARRAY_TYPE
5593 || POINTER_TYPE_P (htype))
5594 {
5595 wtype = TREE_TYPE (wtype);
5596 htype = TREE_TYPE (htype);
5597 }
5598 }
5599 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5600 return va_list_type_node;
5601
5602 return NULL_TREE;
5603}
5604
d3707adb
RH
5605/* The "standard" implementation of va_start: just assign `nextarg' to
5606 the variable. */
5197bd50 5607
d3707adb 5608void
4682ae04 5609std_expand_builtin_va_start (tree valist, rtx nextarg)
d3707adb 5610{
508dabda
ILT
5611 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5612 convert_move (va_r, nextarg, 0);
d3707adb
RH
5613}
5614
5039610b 5615/* Expand EXP, a call to __builtin_va_start. */
5197bd50 5616
d3707adb 5617static rtx
5039610b 5618expand_builtin_va_start (tree exp)
d3707adb
RH
5619{
5620 rtx nextarg;
5039610b 5621 tree valist;
db3927fb 5622 location_t loc = EXPR_LOCATION (exp);
d3707adb 5623
5039610b 5624 if (call_expr_nargs (exp) < 2)
c69c9b36 5625 {
db3927fb 5626 error_at (loc, "too few arguments to function %<va_start%>");
c69c9b36
JM
5627 return const0_rtx;
5628 }
d3707adb 5629
5039610b 5630 if (fold_builtin_next_arg (exp, true))
8870e212 5631 return const0_rtx;
d3147f64 5632
8870e212 5633 nextarg = expand_builtin_next_arg ();
db3927fb 5634 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
d3707adb 5635
d7bd8aeb
JJ
5636 if (targetm.expand_builtin_va_start)
5637 targetm.expand_builtin_va_start (valist, nextarg);
5638 else
5639 std_expand_builtin_va_start (valist, nextarg);
d3707adb
RH
5640
5641 return const0_rtx;
5642}
5643
5039610b 5644/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 5645
d3707adb 5646static rtx
5039610b 5647expand_builtin_va_end (tree exp)
d3707adb 5648{
5039610b 5649 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 5650
daf68dd7
RH
5651 /* Evaluate for side effects, if needed. I hate macros that don't
5652 do that. */
5653 if (TREE_SIDE_EFFECTS (valist))
5654 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
5655
5656 return const0_rtx;
5657}
5658
5039610b 5659/* Expand EXP, a call to __builtin_va_copy. We do this as a
d3707adb
RH
5660 builtin rather than just as an assignment in stdarg.h because of the
5661 nastiness of array-type va_list types. */
3bdf5ad1 5662
d3707adb 5663static rtx
5039610b 5664expand_builtin_va_copy (tree exp)
d3707adb
RH
5665{
5666 tree dst, src, t;
db3927fb 5667 location_t loc = EXPR_LOCATION (exp);
d3707adb 5668
5039610b
SL
5669 dst = CALL_EXPR_ARG (exp, 0);
5670 src = CALL_EXPR_ARG (exp, 1);
d3707adb 5671
db3927fb
AH
5672 dst = stabilize_va_list_loc (loc, dst, 1);
5673 src = stabilize_va_list_loc (loc, src, 0);
d3707adb 5674
35cbb299
KT
5675 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5676
5677 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
d3707adb 5678 {
35cbb299 5679 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
d3707adb
RH
5680 TREE_SIDE_EFFECTS (t) = 1;
5681 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5682 }
5683 else
5684 {
8ebecc3b
RH
5685 rtx dstb, srcb, size;
5686
5687 /* Evaluate to pointers. */
5688 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5689 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
35cbb299
KT
5690 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5691 NULL_RTX, VOIDmode, EXPAND_NORMAL);
8ebecc3b 5692
5ae6cd0d
MM
5693 dstb = convert_memory_address (Pmode, dstb);
5694 srcb = convert_memory_address (Pmode, srcb);
ce2d32cd 5695
8ebecc3b
RH
5696 /* "Dereference" to BLKmode memories. */
5697 dstb = gen_rtx_MEM (BLKmode, dstb);
ba4828e0 5698 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
35cbb299 5699 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b 5700 srcb = gen_rtx_MEM (BLKmode, srcb);
ba4828e0 5701 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
35cbb299 5702 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b
RH
5703
5704 /* Copy. */
44bb111a 5705 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
d3707adb
RH
5706 }
5707
5708 return const0_rtx;
5709}
5710
28f4ec01
BS
5711/* Expand a call to one of the builtin functions __builtin_frame_address or
5712 __builtin_return_address. */
5197bd50 5713
28f4ec01 5714static rtx
5039610b 5715expand_builtin_frame_address (tree fndecl, tree exp)
28f4ec01 5716{
28f4ec01
BS
5717 /* The argument must be a nonnegative integer constant.
5718 It counts the number of frames to scan up the stack.
8423e57c
MS
5719 The value is either the frame pointer value or the return
5720 address saved in that frame. */
5039610b 5721 if (call_expr_nargs (exp) == 0)
28f4ec01
BS
5722 /* Warning about missing arg was already issued. */
5723 return const0_rtx;
cc269bb6 5724 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
28f4ec01 5725 {
8423e57c 5726 error ("invalid argument to %qD", fndecl);
28f4ec01
BS
5727 return const0_rtx;
5728 }
5729 else
5730 {
8423e57c
MS
5731 /* Number of frames to scan up the stack. */
5732 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5733
5734 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
28f4ec01
BS
5735
5736 /* Some ports cannot access arbitrary stack frames. */
5737 if (tem == NULL)
5738 {
8423e57c 5739 warning (0, "unsupported argument to %qD", fndecl);
28f4ec01
BS
5740 return const0_rtx;
5741 }
5742
8423e57c
MS
5743 if (count)
5744 {
5745 /* Warn since no effort is made to ensure that any frame
5746 beyond the current one exists or can be safely reached. */
5747 warning (OPT_Wframe_address, "calling %qD with "
5748 "a nonzero argument is unsafe", fndecl);
5749 }
5750
28f4ec01
BS
5751 /* For __builtin_frame_address, return what we've got. */
5752 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5753 return tem;
5754
f8cfc6aa 5755 if (!REG_P (tem)
28f4ec01 5756 && ! CONSTANT_P (tem))
18ae1560 5757 tem = copy_addr_to_reg (tem);
28f4ec01
BS
5758 return tem;
5759 }
5760}
5761
d3c12306 5762/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
b7e52782 5763 failed and the caller should emit a normal call. */
d5457140 5764
28f4ec01 5765static rtx
b7e52782 5766expand_builtin_alloca (tree exp)
28f4ec01
BS
5767{
5768 rtx op0;
d5457140 5769 rtx result;
13e49da9 5770 unsigned int align;
8bd9f164 5771 tree fndecl = get_callee_fndecl (exp);
9e878cf1
EB
5772 HOST_WIDE_INT max_size;
5773 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
b7e52782 5774 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
8bd9f164 5775 bool valid_arglist
9e878cf1
EB
5776 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5777 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5778 VOID_TYPE)
5779 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5780 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5781 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
13e49da9
TV
5782
5783 if (!valid_arglist)
5039610b 5784 return NULL_RTX;
28f4ec01 5785
00abf86c
MS
5786 if ((alloca_for_var
5787 && warn_vla_limit >= HOST_WIDE_INT_MAX
5788 && warn_alloc_size_limit < warn_vla_limit)
5789 || (!alloca_for_var
5790 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5791 && warn_alloc_size_limit < warn_alloca_limit
5792 ))
8bd9f164 5793 {
00abf86c
MS
5794 /* -Walloca-larger-than and -Wvla-larger-than settings of
5795 less than HOST_WIDE_INT_MAX override the more general
5796 -Walloc-size-larger-than so unless either of the former
5797 options is smaller than the last one (wchich would imply
5798 that the call was already checked), check the alloca
5799 arguments for overflow. */
8bd9f164
MS
5800 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5801 int idx[] = { 0, -1 };
5802 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5803 }
5804
28f4ec01 5805 /* Compute the argument. */
5039610b 5806 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
28f4ec01 5807
13e49da9 5808 /* Compute the alignment. */
9e878cf1
EB
5809 align = (fcode == BUILT_IN_ALLOCA
5810 ? BIGGEST_ALIGNMENT
5811 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5812
5813 /* Compute the maximum size. */
5814 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5815 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5816 : -1);
13e49da9 5817
b7e52782
EB
5818 /* Allocate the desired space. If the allocation stems from the declaration
5819 of a variable-sized object, it cannot accumulate. */
9e878cf1
EB
5820 result
5821 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5ae6cd0d 5822 result = convert_memory_address (ptr_mode, result);
d5457140 5823
3cf3da88
EB
5824 /* Dynamic allocations for variables are recorded during gimplification. */
5825 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5826 record_dynamic_alloc (exp);
5827
d5457140 5828 return result;
28f4ec01
BS
5829}
5830
7504c3bf
JJ
5831/* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5832 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5833 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5834 handle_builtin_stack_restore function. */
e3174bdf
MO
5835
5836static rtx
5837expand_asan_emit_allocas_unpoison (tree exp)
5838{
5839 tree arg0 = CALL_EXPR_ARG (exp, 0);
7504c3bf 5840 tree arg1 = CALL_EXPR_ARG (exp, 1);
8f4956ca 5841 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7504c3bf
JJ
5842 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5843 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5844 stack_pointer_rtx, NULL_RTX, 0,
5845 OPTAB_LIB_WIDEN);
5846 off = convert_modes (ptr_mode, Pmode, off, 0);
5847 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5848 OPTAB_LIB_WIDEN);
e3174bdf 5849 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
db69559b
RS
5850 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5851 top, ptr_mode, bot, ptr_mode);
e3174bdf
MO
5852 return ret;
5853}
5854
ac868f29
EB
5855/* Expand a call to bswap builtin in EXP.
5856 Return NULL_RTX if a normal call should be emitted rather than expanding the
5857 function in-line. If convenient, the result should be placed in TARGET.
5858 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
5859
5860static rtx
ef4bddc2 5861expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 5862 rtx subtarget)
167fa32c 5863{
167fa32c
EC
5864 tree arg;
5865 rtx op0;
5866
5039610b
SL
5867 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5868 return NULL_RTX;
167fa32c 5869
5039610b 5870 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
5871 op0 = expand_expr (arg,
5872 subtarget && GET_MODE (subtarget) == target_mode
5873 ? subtarget : NULL_RTX,
5874 target_mode, EXPAND_NORMAL);
5875 if (GET_MODE (op0) != target_mode)
5876 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 5877
ac868f29 5878 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
5879
5880 gcc_assert (target);
5881
ac868f29 5882 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
5883}
5884
5039610b
SL
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  SUBTARGET is only usable when its mode matches
     the argument's natural mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  Note the unsignedp
     argument: clrsb is the one optab in this family whose result is
     treated as signed.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
994a57cd 5913
b8698a0f 5914/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
5915 as the builtin_expect semantic should've been already executed by
5916 tree branch prediction pass. */
994a57cd
RH
5917
5918static rtx
5039610b 5919expand_builtin_expect (tree exp, rtx target)
994a57cd 5920{
451409e4 5921 tree arg;
994a57cd 5922
5039610b 5923 if (call_expr_nargs (exp) < 2)
994a57cd 5924 return const0_rtx;
5039610b 5925 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 5926
5039610b 5927 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 5928 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 5929 gcc_assert (!flag_guess_branch_prob
1da2ed5f 5930 || optimize == 0 || seen_error ());
994a57cd
RH
5931 return target;
5932}
5f2d6cfa 5933
1e9168b2
ML
5934/* Expand a call to __builtin_expect_with_probability. We just return our
5935 argument as the builtin_expect semantic should've been already executed by
5936 tree branch prediction pass. */
5937
5938static rtx
5939expand_builtin_expect_with_probability (tree exp, rtx target)
5940{
5941 tree arg;
5942
5943 if (call_expr_nargs (exp) < 3)
5944 return const0_rtx;
5945 arg = CALL_EXPR_ARG (exp, 0);
5946
5947 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5948 /* When guessing was done, the hints should be already stripped away. */
5949 gcc_assert (!flag_guess_branch_prob
5950 || optimize == 0 || seen_error ());
5951 return target;
5952}
5953
5954
45d439ac
JJ
5955/* Expand a call to __builtin_assume_aligned. We just return our first
5956 argument as the builtin_assume_aligned semantic should've been already
5957 executed by CCP. */
5958
5959static rtx
5960expand_builtin_assume_aligned (tree exp, rtx target)
5961{
5962 if (call_expr_nargs (exp) < 2)
5963 return const0_rtx;
5964 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5965 EXPAND_NORMAL);
5966 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5967 && (call_expr_nargs (exp) < 3
5968 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5969 return target;
5970}
5971
/* Expand a call to __builtin_trap.  Use the target's trap instruction if
   it has one; otherwise expand to a call to abort ().  Either way control
   does not continue past this point, so finish with a barrier.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      /* No trap insn on this target: fall back to abort ().  */
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
075ec276 5993
468059bc
DD
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* The barrier tells the RTL passes that the preceding insn does not
     fall through.  */
  emit_barrier ();
}
6004
5039610b
SL
6005/* Expand EXP, a call to fabs, fabsf or fabsl.
6006 Return NULL_RTX if a normal call should be emitted rather than expanding
075ec276
RS
6007 the function inline. If convenient, the result should be placed
6008 in TARGET. SUBTARGET may be used as the target for computing
6009 the operand. */
6010
6011static rtx
5039610b 6012expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
075ec276 6013{
ef4bddc2 6014 machine_mode mode;
075ec276
RS
6015 tree arg;
6016 rtx op0;
6017
5039610b
SL
6018 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6019 return NULL_RTX;
075ec276 6020
5039610b 6021 arg = CALL_EXPR_ARG (exp, 0);
4cd8e76f 6022 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
075ec276 6023 mode = TYPE_MODE (TREE_TYPE (arg));
49452c07 6024 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
075ec276
RS
6025 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6026}
6027
5039610b 6028/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
6029 Return NULL is a normal call should be emitted rather than expanding the
6030 function inline. If convenient, the result should be placed in TARGET.
6031 SUBTARGET may be used as the target for computing the operand. */
6032
6033static rtx
5039610b 6034expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
6035{
6036 rtx op0, op1;
6037 tree arg;
6038
5039610b
SL
6039 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6040 return NULL_RTX;
046625fa 6041
5039610b 6042 arg = CALL_EXPR_ARG (exp, 0);
84217346 6043 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 6044
5039610b 6045 arg = CALL_EXPR_ARG (exp, 1);
84217346 6046 op1 = expand_normal (arg);
046625fa
RH
6047
6048 return expand_copysign (op0, op1, target);
6049}
6050
677feb77
DD
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      class expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* The insn's predicate may have rejected the operands; in that case
     simply pretend the cache was cleared.  */
  return const0_rtx;
}
6100
6de9cd9a
DN
6101/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6102
6103static rtx
6104round_trampoline_addr (rtx tramp)
6105{
6106 rtx temp, addend, mask;
6107
6108 /* If we don't need too much alignment, we'll have been guaranteed
6109 proper alignment by get_trampoline_type. */
6110 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6111 return tramp;
6112
6113 /* Round address up to desired boundary. */
6114 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
6115 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6116 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
6117
6118 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6119 temp, 0, OPTAB_LIB_WIDEN);
6120 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6121 temp, 0, OPTAB_LIB_WIDEN);
6122
6123 return tramp;
6124}
6125
/* Expand a call to the trampoline-initialization builtin.  EXP carries the
   trampoline storage address, the nested function's address, and the static
   chain value.  ONSTACK distinguishes the classic stack trampoline from the
   heap variant.  Returns const0_rtx; the real work is delegated to the
   target's trampoline_init hook.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      /* Targets supporting function descriptors can avoid executable-stack
	 trampolines entirely, so warn that one was generated anyway.  */
      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
6183
6184static rtx
5039610b 6185expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
6186{
6187 rtx tramp;
6188
5039610b 6189 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
6190 return NULL_RTX;
6191
5039610b 6192 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 6193 tramp = round_trampoline_addr (tramp);
531ca746
RH
6194 if (targetm.calls.trampoline_adjust_address)
6195 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
6196
6197 return tramp;
6198}
6199
4c640e26
EB
6200/* Expand a call to the builtin descriptor initialization routine.
6201 A descriptor is made up of a couple of pointers to the static
6202 chain and the code entry in this order. */
6203
6204static rtx
6205expand_builtin_init_descriptor (tree exp)
6206{
6207 tree t_descr, t_func, t_chain;
6208 rtx m_descr, r_descr, r_func, r_chain;
6209
6210 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6211 VOID_TYPE))
6212 return NULL_RTX;
6213
6214 t_descr = CALL_EXPR_ARG (exp, 0);
6215 t_func = CALL_EXPR_ARG (exp, 1);
6216 t_chain = CALL_EXPR_ARG (exp, 2);
6217
6218 r_descr = expand_normal (t_descr);
6219 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6220 MEM_NOTRAP_P (m_descr) = 1;
0bdf9f92 6221 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
4c640e26
EB
6222
6223 r_func = expand_normal (t_func);
6224 r_chain = expand_normal (t_chain);
6225
6226 /* Generate insns to initialize the descriptor. */
6227 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6228 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6229 POINTER_SIZE / BITS_PER_UNIT), r_func);
6230
6231 return const0_rtx;
6232}
6233
6234/* Expand a call to the builtin descriptor adjustment routine. */
6235
6236static rtx
6237expand_builtin_adjust_descriptor (tree exp)
6238{
6239 rtx tramp;
6240
6241 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6242 return NULL_RTX;
6243
6244 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6245
6246 /* Unalign the descriptor to allow runtime identification. */
6247 tramp = plus_constant (ptr_mode, tramp,
6248 targetm.calls.custom_function_descriptors);
6249
6250 return force_operand (tramp, NULL_RTX);
6251}
6252
0f67fa83
WG
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  /* Save the argument so the LT_EXPR fallback below can reuse it.  */
  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn didn't match after all; discard it and fall back to the
	 open-coded bit extraction below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero. */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word; view it as an integer.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders. */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      /* Pull out just the word containing the sign bit and rebase BITPOS
	 relative to that word.  */
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
d1c38823
ZD
6359
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call. FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
    {
      /* Detect unterminated path.  */
      if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
	return NULL_RTX;

      /* Also detect unterminated first argument.  */
      switch (DECL_FUNCTION_CODE (fn))
	{
	case BUILT_IN_EXECL:
	case BUILT_IN_EXECLE:
	case BUILT_IN_EXECLP:
	  /* NOTE(review): this re-checks argument 0, which was already
	     checked above; the list-argument exec variants presumably
	     intended to check a later argument — confirm against the
	     upstream history.  */
	  if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
	    return NULL_RTX;
	  /* FALLTHRU */
	default:
	  break;
	}
    }


  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern declaration for the __gcov_* wrapper with the same
     type as the original function and redirect the call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
b8698a0f 6444
48ae6c13
RH
6445
6446\f
02ee605c
RH
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
6462
1387fef3
AS
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  /* LOC may be a pointer or a bare address expression; take the address
     space from the pointed-to type in the pointer case.  */
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  /* The dedicated barrier alias set conflicts with everything, matching
     the comment above.  */
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
6495
86951993
AM
6496/* Make sure an argument is in the right mode.
6497 EXP is the tree argument.
6498 MODE is the mode it should be in. */
6499
6500static rtx
ef4bddc2 6501expand_expr_force_mode (tree exp, machine_mode mode)
86951993
AM
6502{
6503 rtx val;
ef4bddc2 6504 machine_mode old_mode;
86951993
AM
6505
6506 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6507 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6508 of CONST_INTs, where we know the old_mode only from the call argument. */
6509
6510 old_mode = GET_MODE (val);
6511 if (old_mode == VOIDmode)
6512 old_mode = TYPE_MODE (TREE_TYPE (exp));
6513 val = convert_modes (mode, old_mode, val, 1);
6514 return val;
6515}
6516
6517
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* If requested, warn (once per builtin family) that the NAND builtins
     changed semantics in GCC 4.4.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The legacy __sync builtins are always fully-fenced seq-cst.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
6579}
6580
6581/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 6582 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
6583 true if this is the boolean form. TARGET is a place for us to store the
6584 results; this is NOT optional if IS_BOOL is true. */
6585
6586static rtx
ef4bddc2 6587expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 6588 bool is_bool, rtx target)
48ae6c13 6589{
1387fef3 6590 rtx old_val, new_val, mem;
f0409b19 6591 rtx *pbool, *poval;
48ae6c13
RH
6592
6593 /* Expand the operands. */
5039610b 6594 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6595 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6596 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 6597
f0409b19
RH
6598 pbool = poval = NULL;
6599 if (target != const0_rtx)
6600 {
6601 if (is_bool)
6602 pbool = &target;
6603 else
6604 poval = &target;
6605 }
6606 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
6607 false, MEMMODEL_SYNC_SEQ_CST,
6608 MEMMODEL_SYNC_SEQ_CST))
86951993 6609 return NULL_RTX;
5039610b 6610
86951993 6611 return target;
48ae6c13
RH
6612}
6613
6614/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6615 general form is actually an atomic exchange, and some targets only
6616 support a reduced form with the second argument being a constant 1.
b8698a0f 6617 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 6618 the results. */
48ae6c13
RH
6619
6620static rtx
ef4bddc2 6621expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 6622 rtx target)
48ae6c13 6623{
1387fef3 6624 rtx val, mem;
48ae6c13
RH
6625
6626 /* Expand the operands. */
5039610b 6627 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6628 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6629
744accb2 6630 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
6631}
6632
6633/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6634
6635static void
ef4bddc2 6636expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
6637{
6638 rtx mem;
6639
6640 /* Expand the operands. */
6641 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6642
46b35980 6643 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
6644}
6645
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Bits outside MEMMODEL_MASK are target-specific extensions; let the
     target validate them if it provides a hook, otherwise reject them.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
6689
6690/* Expand the __atomic_exchange intrinsic:
6691 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6692 EXP is the CALL_EXPR.
6693 TARGET is an optional place for us to store the results. */
6694
6695static rtx
ef4bddc2 6696expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
6697{
6698 rtx val, mem;
6699 enum memmodel model;
6700
6701 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
6702
6703 if (!flag_inline_atomics)
6704 return NULL_RTX;
6705
6706 /* Expand the operands. */
6707 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6708 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6709
744accb2 6710 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
6711}
6712
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* Diagnose invalid model combinations and degrade to SEQ_CST rather
     than failing the expansion.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; form a MEM for the value it points to.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* WEAK only counts when it is a nonzero compile-time constant.  */
  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6794
849a76a5
JJ
6795/* Helper function for expand_ifn_atomic_compare_exchange - expand
6796 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6797 call. The weak parameter must be dropped to match the expected parameter
6798 list and the expected argument changed from value to pointer to memory
6799 slot. */
6800
6801static void
6802expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6803{
6804 unsigned int z;
6805 vec<tree, va_gc> *vec;
6806
6807 vec_alloc (vec, 5);
6808 vec->quick_push (gimple_call_arg (call, 0));
6809 tree expected = gimple_call_arg (call, 1);
6810 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6811 TREE_TYPE (expected));
6812 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6813 if (expd != x)
6814 emit_move_insn (x, expd);
6815 tree v = make_tree (TREE_TYPE (expected), x);
6816 vec->quick_push (build1 (ADDR_EXPR,
6817 build_pointer_type (TREE_TYPE (expected)), v));
6818 vec->quick_push (gimple_call_arg (call, 2));
6819 /* Skip the boolean weak parameter. */
6820 for (z = 4; z < 6; z++)
6821 vec->quick_push (gimple_call_arg (call, z));
4871e1ed 6822 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
cf098191 6823 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
4871e1ed 6824 gcc_assert (bytes_log2 < 5);
849a76a5
JJ
6825 built_in_function fncode
6826 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
4871e1ed 6827 + bytes_log2);
849a76a5
JJ
6828 tree fndecl = builtin_decl_explicit (fncode);
6829 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6830 fndecl);
6831 tree exp = build_call_vec (boolean_type_node, fn, vec);
6832 tree lhs = gimple_call_lhs (call);
6833 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6834 if (lhs)
6835 {
6836 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6837 if (GET_MODE (boolret) != mode)
6838 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6839 x = force_reg (mode, x);
6840 write_complex_part (target, boolret, true);
6841 write_complex_part (target, x, false);
6842 }
6843}
6844
6845/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6846
6847void
6848expand_ifn_atomic_compare_exchange (gcall *call)
6849{
6850 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6851 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
f4b31647 6852 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
849a76a5
JJ
6853 rtx expect, desired, mem, oldval, boolret;
6854 enum memmodel success, failure;
6855 tree lhs;
6856 bool is_weak;
620e594b 6857 location_t loc
849a76a5
JJ
6858 = expansion_point_location_if_in_system_header (gimple_location (call));
6859
6860 success = get_memmodel (gimple_call_arg (call, 4));
6861 failure = get_memmodel (gimple_call_arg (call, 5));
6862
6863 if (failure > success)
6864 {
6865 warning_at (loc, OPT_Winvalid_memory_model,
6866 "failure memory model cannot be stronger than success "
6867 "memory model for %<__atomic_compare_exchange%>");
6868 success = MEMMODEL_SEQ_CST;
6869 }
6870
6871 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6872 {
6873 warning_at (loc, OPT_Winvalid_memory_model,
6874 "invalid failure memory model for "
6875 "%<__atomic_compare_exchange%>");
6876 failure = MEMMODEL_SEQ_CST;
6877 success = MEMMODEL_SEQ_CST;
6878 }
6879
6880 if (!flag_inline_atomics)
6881 {
6882 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6883 return;
6884 }
6885
6886 /* Expand the operands. */
6887 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6888
6889 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6890 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6891
6892 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6893
6894 boolret = NULL;
6895 oldval = NULL;
6896
6897 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6898 is_weak, success, failure))
6899 {
6900 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6901 return;
6902 }
6903
6904 lhs = gimple_call_lhs (call);
6905 if (lhs)
6906 {
6907 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6908 if (GET_MODE (boolret) != mode)
6909 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6910 write_complex_part (target, boolret, true);
6911 write_complex_part (target, oldval, false);
6912 }
6913}
6914
86951993
AM
6915/* Expand the __atomic_load intrinsic:
6916 TYPE __atomic_load (TYPE *object, enum memmodel)
6917 EXP is the CALL_EXPR.
6918 TARGET is an optional place for us to store the results. */
6919
6920static rtx
ef4bddc2 6921expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
86951993
AM
6922{
6923 rtx mem;
6924 enum memmodel model;
6925
6926 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
46b35980 6927 if (is_mm_release (model) || is_mm_acq_rel (model))
86951993 6928 {
620e594b 6929 location_t loc
8d9fdb49
MP
6930 = expansion_point_location_if_in_system_header (input_location);
6931 warning_at (loc, OPT_Winvalid_memory_model,
6932 "invalid memory model for %<__atomic_load%>");
77df5327 6933 model = MEMMODEL_SEQ_CST;
86951993
AM
6934 }
6935
6936 if (!flag_inline_atomics)
6937 return NULL_RTX;
6938
6939 /* Expand the operand. */
6940 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6941
6942 return expand_atomic_load (target, mem, model);
6943}
6944
6945
6946/* Expand the __atomic_store intrinsic:
6947 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6948 EXP is the CALL_EXPR.
6949 TARGET is an optional place for us to store the results. */
6950
6951static rtx
ef4bddc2 6952expand_builtin_atomic_store (machine_mode mode, tree exp)
86951993
AM
6953{
6954 rtx mem, val;
6955 enum memmodel model;
6956
6957 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
46b35980
AM
6958 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6959 || is_mm_release (model)))
86951993 6960 {
620e594b 6961 location_t loc
8d9fdb49
MP
6962 = expansion_point_location_if_in_system_header (input_location);
6963 warning_at (loc, OPT_Winvalid_memory_model,
6964 "invalid memory model for %<__atomic_store%>");
77df5327 6965 model = MEMMODEL_SEQ_CST;
86951993
AM
6966 }
6967
6968 if (!flag_inline_atomics)
6969 return NULL_RTX;
6970
6971 /* Expand the operands. */
6972 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6973 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6974
0669295b 6975 return expand_atomic_store (mem, val, model, false);
86951993
AM
6976}
6977
6978/* Expand the __atomic_fetch_XXX intrinsic:
6979 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6980 EXP is the CALL_EXPR.
6981 TARGET is an optional place for us to store the results.
6982 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6983 FETCH_AFTER is true if returning the result of the operation.
6984 FETCH_AFTER is false if returning the value before the operation.
6985 IGNORE is true if the result is not used.
6986 EXT_CALL is the correct builtin for an external call if this cannot be
6987 resolved to an instruction sequence. */
6988
6989static rtx
ef4bddc2 6990expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
86951993
AM
6991 enum rtx_code code, bool fetch_after,
6992 bool ignore, enum built_in_function ext_call)
6993{
6994 rtx val, mem, ret;
6995 enum memmodel model;
6996 tree fndecl;
6997 tree addr;
6998
6999 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7000
7001 /* Expand the operands. */
7002 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7003 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7004
7005 /* Only try generating instructions if inlining is turned on. */
7006 if (flag_inline_atomics)
7007 {
7008 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7009 if (ret)
7010 return ret;
7011 }
7012
7013 /* Return if a different routine isn't needed for the library call. */
7014 if (ext_call == BUILT_IN_NONE)
7015 return NULL_RTX;
7016
7017 /* Change the call to the specified function. */
7018 fndecl = get_callee_fndecl (exp);
7019 addr = CALL_EXPR_FN (exp);
7020 STRIP_NOPS (addr);
7021
7022 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
c3284718 7023 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
86951993 7024
67914693 7025 /* If we will emit code after the call, the call cannot be a tail call.
08c273bb
SB
7026 If it is emitted as a tail call, a barrier is emitted after it, and
7027 then all trailing code is removed. */
7028 if (!ignore)
7029 CALL_EXPR_TAILCALL (exp) = 0;
7030
86951993
AM
7031 /* Expand the call here so we can emit trailing code. */
7032 ret = expand_call (exp, target, ignore);
7033
7034 /* Replace the original function just in case it matters. */
7035 TREE_OPERAND (addr, 0) = fndecl;
7036
7037 /* Then issue the arithmetic correction to return the right result. */
7038 if (!ignore)
154b68db
AM
7039 {
7040 if (code == NOT)
7041 {
7042 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7043 OPTAB_LIB_WIDEN);
7044 ret = expand_simple_unop (mode, NOT, ret, target, true);
7045 }
7046 else
7047 ret = expand_simple_binop (mode, code, ret, val, target, true,
7048 OPTAB_LIB_WIDEN);
7049 }
86951993
AM
7050 return ret;
7051}
7052
adedd5c1
JJ
7053/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7054
7055void
7056expand_ifn_atomic_bit_test_and (gcall *call)
7057{
7058 tree ptr = gimple_call_arg (call, 0);
7059 tree bit = gimple_call_arg (call, 1);
7060 tree flag = gimple_call_arg (call, 2);
7061 tree lhs = gimple_call_lhs (call);
7062 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7063 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7064 enum rtx_code code;
7065 optab optab;
99b1c316 7066 class expand_operand ops[5];
adedd5c1
JJ
7067
7068 gcc_assert (flag_inline_atomics);
7069
7070 if (gimple_call_num_args (call) == 4)
7071 model = get_memmodel (gimple_call_arg (call, 3));
7072
7073 rtx mem = get_builtin_sync_mem (ptr, mode);
7074 rtx val = expand_expr_force_mode (bit, mode);
7075
7076 switch (gimple_call_internal_fn (call))
7077 {
7078 case IFN_ATOMIC_BIT_TEST_AND_SET:
7079 code = IOR;
7080 optab = atomic_bit_test_and_set_optab;
7081 break;
7082 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7083 code = XOR;
7084 optab = atomic_bit_test_and_complement_optab;
7085 break;
7086 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7087 code = AND;
7088 optab = atomic_bit_test_and_reset_optab;
7089 break;
7090 default:
7091 gcc_unreachable ();
7092 }
7093
7094 if (lhs == NULL_TREE)
7095 {
7096 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7097 val, NULL_RTX, true, OPTAB_DIRECT);
7098 if (code == AND)
7099 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7100 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7101 return;
7102 }
7103
7104 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7105 enum insn_code icode = direct_optab_handler (optab, mode);
7106 gcc_assert (icode != CODE_FOR_nothing);
7107 create_output_operand (&ops[0], target, mode);
7108 create_fixed_operand (&ops[1], mem);
7109 create_convert_operand_to (&ops[2], val, mode, true);
7110 create_integer_operand (&ops[3], model);
7111 create_integer_operand (&ops[4], integer_onep (flag));
7112 if (maybe_expand_insn (icode, 5, ops))
7113 return;
7114
7115 rtx bitval = val;
7116 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7117 val, NULL_RTX, true, OPTAB_DIRECT);
7118 rtx maskval = val;
7119 if (code == AND)
7120 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7121 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7122 code, model, false);
7123 if (integer_onep (flag))
7124 {
7125 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7126 NULL_RTX, true, OPTAB_DIRECT);
7127 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7128 true, OPTAB_DIRECT);
7129 }
7130 else
7131 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7132 OPTAB_DIRECT);
7133 if (result != target)
7134 emit_move_insn (target, result);
7135}
7136
d660c35e
AM
7137/* Expand an atomic clear operation.
7138 void _atomic_clear (BOOL *obj, enum memmodel)
7139 EXP is the call expression. */
7140
7141static rtx
7142expand_builtin_atomic_clear (tree exp)
7143{
ef4bddc2 7144 machine_mode mode;
d660c35e
AM
7145 rtx mem, ret;
7146 enum memmodel model;
7147
f4b31647 7148 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7149 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7150 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7151
46b35980 7152 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 7153 {
620e594b 7154 location_t loc
8d9fdb49
MP
7155 = expansion_point_location_if_in_system_header (input_location);
7156 warning_at (loc, OPT_Winvalid_memory_model,
7157 "invalid memory model for %<__atomic_store%>");
77df5327 7158 model = MEMMODEL_SEQ_CST;
d660c35e
AM
7159 }
7160
7161 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7162 Failing that, a store is issued by __atomic_store. The only way this can
7163 fail is if the bool type is larger than a word size. Unlikely, but
7164 handle it anyway for completeness. Assume a single threaded model since
7165 there is no atomic support in this case, and no barriers are required. */
7166 ret = expand_atomic_store (mem, const0_rtx, model, true);
7167 if (!ret)
7168 emit_move_insn (mem, const0_rtx);
7169 return const0_rtx;
7170}
7171
7172/* Expand an atomic test_and_set operation.
7173 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7174 EXP is the call expression. */
7175
7176static rtx
744accb2 7177expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 7178{
744accb2 7179 rtx mem;
d660c35e 7180 enum memmodel model;
ef4bddc2 7181 machine_mode mode;
d660c35e 7182
f4b31647 7183 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7184 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7185 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7186
744accb2 7187 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
7188}
7189
7190
86951993
AM
7191/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7192 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7193
7194static tree
7195fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7196{
7197 int size;
ef4bddc2 7198 machine_mode mode;
86951993
AM
7199 unsigned int mode_align, type_align;
7200
7201 if (TREE_CODE (arg0) != INTEGER_CST)
7202 return NULL_TREE;
48ae6c13 7203
f4b31647 7204 /* We need a corresponding integer mode for the access to be lock-free. */
86951993 7205 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
f4b31647
RS
7206 if (!int_mode_for_size (size, 0).exists (&mode))
7207 return boolean_false_node;
7208
86951993
AM
7209 mode_align = GET_MODE_ALIGNMENT (mode);
7210
310055e7
JW
7211 if (TREE_CODE (arg1) == INTEGER_CST)
7212 {
7213 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7214
7215 /* Either this argument is null, or it's a fake pointer encoding
7216 the alignment of the object. */
146ec50f 7217 val = least_bit_hwi (val);
310055e7
JW
7218 val *= BITS_PER_UNIT;
7219
7220 if (val == 0 || mode_align < val)
7221 type_align = mode_align;
7222 else
7223 type_align = val;
7224 }
86951993
AM
7225 else
7226 {
7227 tree ttype = TREE_TYPE (arg1);
7228
7229 /* This function is usually invoked and folded immediately by the front
7230 end before anything else has a chance to look at it. The pointer
7231 parameter at this point is usually cast to a void *, so check for that
7232 and look past the cast. */
7d9cf801
JJ
7233 if (CONVERT_EXPR_P (arg1)
7234 && POINTER_TYPE_P (ttype)
7235 && VOID_TYPE_P (TREE_TYPE (ttype))
7236 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
86951993
AM
7237 arg1 = TREE_OPERAND (arg1, 0);
7238
7239 ttype = TREE_TYPE (arg1);
7240 gcc_assert (POINTER_TYPE_P (ttype));
7241
7242 /* Get the underlying type of the object. */
7243 ttype = TREE_TYPE (ttype);
7244 type_align = TYPE_ALIGN (ttype);
7245 }
7246
026c3cfd 7247 /* If the object has smaller alignment, the lock free routines cannot
86951993
AM
7248 be used. */
7249 if (type_align < mode_align)
58d38fd2 7250 return boolean_false_node;
86951993
AM
7251
7252 /* Check if a compare_and_swap pattern exists for the mode which represents
7253 the required size. The pattern is not allowed to fail, so the existence
969a32ce
TR
7254 of the pattern indicates support is present. Also require that an
7255 atomic load exists for the required size. */
7256 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
58d38fd2 7257 return boolean_true_node;
86951993 7258 else
58d38fd2 7259 return boolean_false_node;
86951993
AM
7260}
7261
7262/* Return true if the parameters to call EXP represent an object which will
7263 always generate lock free instructions. The first argument represents the
7264 size of the object, and the second parameter is a pointer to the object
7265 itself. If NULL is passed for the object, then the result is based on
7266 typical alignment for an object of the specified size. Otherwise return
7267 false. */
7268
7269static rtx
7270expand_builtin_atomic_always_lock_free (tree exp)
7271{
7272 tree size;
7273 tree arg0 = CALL_EXPR_ARG (exp, 0);
7274 tree arg1 = CALL_EXPR_ARG (exp, 1);
7275
7276 if (TREE_CODE (arg0) != INTEGER_CST)
7277 {
a9c697b8 7278 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
86951993
AM
7279 return const0_rtx;
7280 }
7281
7282 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 7283 if (size == boolean_true_node)
86951993
AM
7284 return const1_rtx;
7285 return const0_rtx;
7286}
7287
7288/* Return a one or zero if it can be determined that object ARG1 of size ARG
7289 is lock free on this architecture. */
7290
7291static tree
7292fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7293{
7294 if (!flag_inline_atomics)
7295 return NULL_TREE;
7296
7297 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
7298 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7299 return boolean_true_node;
86951993
AM
7300
7301 return NULL_TREE;
7302}
7303
7304/* Return true if the parameters to call EXP represent an object which will
7305 always generate lock free instructions. The first argument represents the
7306 size of the object, and the second parameter is a pointer to the object
7307 itself. If NULL is passed for the object, then the result is based on
7308 typical alignment for an object of the specified size. Otherwise return
7309 NULL*/
7310
7311static rtx
7312expand_builtin_atomic_is_lock_free (tree exp)
7313{
7314 tree size;
7315 tree arg0 = CALL_EXPR_ARG (exp, 0);
7316 tree arg1 = CALL_EXPR_ARG (exp, 1);
7317
7318 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7319 {
a9c697b8 7320 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
86951993
AM
7321 return NULL_RTX;
7322 }
7323
7324 if (!flag_inline_atomics)
7325 return NULL_RTX;
7326
7327 /* If the value is known at compile time, return the RTX for it. */
7328 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 7329 if (size == boolean_true_node)
86951993
AM
7330 return const1_rtx;
7331
7332 return NULL_RTX;
7333}
7334
86951993
AM
7335/* Expand the __atomic_thread_fence intrinsic:
7336 void __atomic_thread_fence (enum memmodel)
7337 EXP is the CALL_EXPR. */
7338
7339static void
7340expand_builtin_atomic_thread_fence (tree exp)
7341{
c39169c8
RH
7342 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7343 expand_mem_thread_fence (model);
86951993
AM
7344}
7345
7346/* Expand the __atomic_signal_fence intrinsic:
7347 void __atomic_signal_fence (enum memmodel)
7348 EXP is the CALL_EXPR. */
7349
7350static void
7351expand_builtin_atomic_signal_fence (tree exp)
7352{
c39169c8
RH
7353 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7354 expand_mem_signal_fence (model);
48ae6c13
RH
7355}
7356
7357/* Expand the __sync_synchronize intrinsic. */
7358
7359static void
e0a8ecf2 7360expand_builtin_sync_synchronize (void)
48ae6c13 7361{
46b35980 7362 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
48ae6c13
RH
7363}
7364
f959607b
CLT
7365static rtx
7366expand_builtin_thread_pointer (tree exp, rtx target)
7367{
7368 enum insn_code icode;
7369 if (!validate_arglist (exp, VOID_TYPE))
7370 return const0_rtx;
7371 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7372 if (icode != CODE_FOR_nothing)
7373 {
99b1c316 7374 class expand_operand op;
b8a542c6
AP
7375 /* If the target is not sutitable then create a new target. */
7376 if (target == NULL_RTX
7377 || !REG_P (target)
7378 || GET_MODE (target) != Pmode)
f959607b
CLT
7379 target = gen_reg_rtx (Pmode);
7380 create_output_operand (&op, target, Pmode);
7381 expand_insn (icode, 1, &op);
7382 return target;
7383 }
a3f9f006 7384 error ("%<__builtin_thread_pointer%> is not supported on this target");
f959607b
CLT
7385 return const0_rtx;
7386}
7387
7388static void
7389expand_builtin_set_thread_pointer (tree exp)
7390{
7391 enum insn_code icode;
7392 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7393 return;
7394 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7395 if (icode != CODE_FOR_nothing)
7396 {
99b1c316 7397 class expand_operand op;
f959607b
CLT
7398 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7399 Pmode, EXPAND_NORMAL);
5440a1b0 7400 create_input_operand (&op, val, Pmode);
f959607b
CLT
7401 expand_insn (icode, 1, &op);
7402 return;
7403 }
a3f9f006 7404 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
f959607b
CLT
7405}
7406
28f4ec01 7407\f
862d0b35
DN
7408/* Emit code to restore the current value of stack. */
7409
7410static void
7411expand_stack_restore (tree var)
7412{
58f4cf2a
DM
7413 rtx_insn *prev;
7414 rtx sa = expand_normal (var);
862d0b35
DN
7415
7416 sa = convert_memory_address (Pmode, sa);
7417
7418 prev = get_last_insn ();
7419 emit_stack_restore (SAVE_BLOCK, sa);
d33606c3
EB
7420
7421 record_new_stack_level ();
7422
862d0b35
DN
7423 fixup_args_size_notes (prev, get_last_insn (), 0);
7424}
7425
862d0b35
DN
7426/* Emit code to save the current value of stack. */
7427
7428static rtx
7429expand_stack_save (void)
7430{
7431 rtx ret = NULL_RTX;
7432
862d0b35
DN
7433 emit_stack_save (SAVE_BLOCK, &ret);
7434 return ret;
7435}
7436
1f62d637
TV
7437/* Emit code to get the openacc gang, worker or vector id or size. */
7438
7439static rtx
7440expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7441{
7442 const char *name;
7443 rtx fallback_retval;
7444 rtx_insn *(*gen_fn) (rtx, rtx);
7445 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7446 {
7447 case BUILT_IN_GOACC_PARLEVEL_ID:
7448 name = "__builtin_goacc_parlevel_id";
7449 fallback_retval = const0_rtx;
7450 gen_fn = targetm.gen_oacc_dim_pos;
7451 break;
7452 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7453 name = "__builtin_goacc_parlevel_size";
7454 fallback_retval = const1_rtx;
7455 gen_fn = targetm.gen_oacc_dim_size;
7456 break;
7457 default:
7458 gcc_unreachable ();
7459 }
7460
7461 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7462 {
7463 error ("%qs only supported in OpenACC code", name);
7464 return const0_rtx;
7465 }
7466
7467 tree arg = CALL_EXPR_ARG (exp, 0);
7468 if (TREE_CODE (arg) != INTEGER_CST)
7469 {
7470 error ("non-constant argument 0 to %qs", name);
7471 return const0_rtx;
7472 }
7473
7474 int dim = TREE_INT_CST_LOW (arg);
7475 switch (dim)
7476 {
7477 case GOMP_DIM_GANG:
7478 case GOMP_DIM_WORKER:
7479 case GOMP_DIM_VECTOR:
7480 break;
7481 default:
7482 error ("illegal argument 0 to %qs", name);
7483 return const0_rtx;
7484 }
7485
7486 if (ignore)
7487 return target;
7488
39bc9f83
TV
7489 if (target == NULL_RTX)
7490 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7491
1f62d637
TV
7492 if (!targetm.have_oacc_dim_size ())
7493 {
7494 emit_move_insn (target, fallback_retval);
7495 return target;
7496 }
7497
7498 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7499 emit_insn (gen_fn (reg, GEN_INT (dim)));
7500 if (reg != target)
7501 emit_move_insn (target, reg);
7502
7503 return target;
7504}
41dbbb37 7505
10a0e2a9 7506/* Expand a string compare operation using a sequence of char comparison
b2272b13
QZ
7507 to get rid of the calling overhead, with result going to TARGET if
7508 that's convenient.
7509
7510 VAR_STR is the variable string source;
7511 CONST_STR is the constant string source;
7512 LENGTH is the number of chars to compare;
7513 CONST_STR_N indicates which source string is the constant string;
7514 IS_MEMCMP indicates whether it's a memcmp or strcmp.
10a0e2a9 7515
b2272b13
QZ
7516 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7517
523a59ff
QZ
7518 target = (int) (unsigned char) var_str[0]
7519 - (int) (unsigned char) const_str[0];
b2272b13
QZ
7520 if (target != 0)
7521 goto ne_label;
7522 ...
523a59ff
QZ
7523 target = (int) (unsigned char) var_str[length - 2]
7524 - (int) (unsigned char) const_str[length - 2];
b2272b13
QZ
7525 if (target != 0)
7526 goto ne_label;
523a59ff
QZ
7527 target = (int) (unsigned char) var_str[length - 1]
7528 - (int) (unsigned char) const_str[length - 1];
b2272b13
QZ
7529 ne_label:
7530 */
7531
7532static rtx
10a0e2a9 7533inline_string_cmp (rtx target, tree var_str, const char *const_str,
b2272b13 7534 unsigned HOST_WIDE_INT length,
523a59ff 7535 int const_str_n, machine_mode mode)
b2272b13
QZ
7536{
7537 HOST_WIDE_INT offset = 0;
10a0e2a9 7538 rtx var_rtx_array
b2272b13
QZ
7539 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7540 rtx var_rtx = NULL_RTX;
10a0e2a9
JJ
7541 rtx const_rtx = NULL_RTX;
7542 rtx result = target ? target : gen_reg_rtx (mode);
7543 rtx_code_label *ne_label = gen_label_rtx ();
523a59ff 7544 tree unit_type_node = unsigned_char_type_node;
10a0e2a9
JJ
7545 scalar_int_mode unit_mode
7546 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
b2272b13
QZ
7547
7548 start_sequence ();
7549
7550 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7551 {
10a0e2a9 7552 var_rtx
b2272b13 7553 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
10a0e2a9 7554 const_rtx = c_readstr (const_str + offset, unit_mode);
b2272b13
QZ
7555 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7556 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
10a0e2a9 7557
523a59ff
QZ
7558 op0 = convert_modes (mode, unit_mode, op0, 1);
7559 op1 = convert_modes (mode, unit_mode, op1, 1);
10a0e2a9 7560 result = expand_simple_binop (mode, MINUS, op0, op1,
523a59ff 7561 result, 1, OPTAB_WIDEN);
10a0e2a9
JJ
7562 if (i < length - 1)
7563 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7564 mode, true, ne_label);
7565 offset += GET_MODE_SIZE (unit_mode);
b2272b13
QZ
7566 }
7567
7568 emit_label (ne_label);
7569 rtx_insn *insns = get_insns ();
7570 end_sequence ();
7571 emit_insn (insns);
7572
7573 return result;
7574}
7575
10a0e2a9 7576/* Inline expansion a call to str(n)cmp, with result going to
b2272b13
QZ
7577 TARGET if that's convenient.
7578 If the call is not been inlined, return NULL_RTX. */
7579static rtx
523a59ff 7580inline_expand_builtin_string_cmp (tree exp, rtx target)
b2272b13
QZ
7581{
7582 tree fndecl = get_callee_fndecl (exp);
7583 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7584 unsigned HOST_WIDE_INT length = 0;
7585 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7586
3d592d2d
QZ
7587 /* Do NOT apply this inlining expansion when optimizing for size or
7588 optimization level below 2. */
7589 if (optimize < 2 || optimize_insn_for_size_p ())
7590 return NULL_RTX;
7591
b2272b13 7592 gcc_checking_assert (fcode == BUILT_IN_STRCMP
10a0e2a9 7593 || fcode == BUILT_IN_STRNCMP
b2272b13
QZ
7594 || fcode == BUILT_IN_MEMCMP);
7595
523a59ff
QZ
7596 /* On a target where the type of the call (int) has same or narrower presicion
7597 than unsigned char, give up the inlining expansion. */
7598 if (TYPE_PRECISION (unsigned_char_type_node)
7599 >= TYPE_PRECISION (TREE_TYPE (exp)))
7600 return NULL_RTX;
7601
b2272b13
QZ
7602 tree arg1 = CALL_EXPR_ARG (exp, 0);
7603 tree arg2 = CALL_EXPR_ARG (exp, 1);
7604 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7605
7606 unsigned HOST_WIDE_INT len1 = 0;
7607 unsigned HOST_WIDE_INT len2 = 0;
7608 unsigned HOST_WIDE_INT len3 = 0;
7609
7610 const char *src_str1 = c_getstr (arg1, &len1);
7611 const char *src_str2 = c_getstr (arg2, &len2);
10a0e2a9 7612
b2272b13
QZ
7613 /* If neither strings is constant string, the call is not qualify. */
7614 if (!src_str1 && !src_str2)
7615 return NULL_RTX;
7616
7617 /* For strncmp, if the length is not a const, not qualify. */
6aa2e42c
ML
7618 if (is_ncmp)
7619 {
7620 if (!tree_fits_uhwi_p (len3_tree))
7621 return NULL_RTX;
7622 else
7623 len3 = tree_to_uhwi (len3_tree);
7624 }
7625
7626 if (src_str1 != NULL)
7627 len1 = strnlen (src_str1, len1) + 1;
7628
7629 if (src_str2 != NULL)
7630 len2 = strnlen (src_str2, len2) + 1;
b2272b13
QZ
7631
7632 int const_str_n = 0;
7633 if (!len1)
7634 const_str_n = 2;
7635 else if (!len2)
7636 const_str_n = 1;
7637 else if (len2 > len1)
7638 const_str_n = 1;
7639 else
7640 const_str_n = 2;
7641
7642 gcc_checking_assert (const_str_n > 0);
7643 length = (const_str_n == 1) ? len1 : len2;
7644
6aa2e42c 7645 if (is_ncmp && len3 < length)
b2272b13
QZ
7646 length = len3;
7647
10a0e2a9 7648 /* If the length of the comparision is larger than the threshold,
b2272b13 7649 do nothing. */
10a0e2a9 7650 if (length > (unsigned HOST_WIDE_INT)
028d4092 7651 param_builtin_string_cmp_inline_length)
b2272b13
QZ
7652 return NULL_RTX;
7653
7654 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7655
7656 /* Now, start inline expansion the call. */
10a0e2a9 7657 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
b2272b13 7658 (const_str_n == 1) ? src_str1 : src_str2, length,
523a59ff 7659 const_str_n, mode);
b2272b13
QZ
7660}
7661
425fc685
RE
7662/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7663 represents the size of the first argument to that call, or VOIDmode
7664 if the argument is a pointer. IGNORE will be true if the result
7665 isn't used. */
7666static rtx
7667expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7668 bool ignore)
7669{
7670 rtx val, failsafe;
7671 unsigned nargs = call_expr_nargs (exp);
7672
7673 tree arg0 = CALL_EXPR_ARG (exp, 0);
7674
7675 if (mode == VOIDmode)
7676 {
7677 mode = TYPE_MODE (TREE_TYPE (arg0));
7678 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7679 }
7680
7681 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7682
7683 /* An optional second argument can be used as a failsafe value on
7684 some machines. If it isn't present, then the failsafe value is
7685 assumed to be 0. */
7686 if (nargs > 1)
7687 {
7688 tree arg1 = CALL_EXPR_ARG (exp, 1);
7689 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7690 }
7691 else
7692 failsafe = const0_rtx;
7693
7694 /* If the result isn't used, the behavior is undefined. It would be
7695 nice to emit a warning here, but path splitting means this might
7696 happen with legitimate code. So simply drop the builtin
7697 expansion in that case; we've handled any side-effects above. */
7698 if (ignore)
7699 return const0_rtx;
7700
7701 /* If we don't have a suitable target, create one to hold the result. */
7702 if (target == NULL || GET_MODE (target) != mode)
7703 target = gen_reg_rtx (mode);
7704
7705 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7706 val = convert_modes (mode, VOIDmode, val, false);
7707
7708 return targetm.speculation_safe_value (mode, target, val, failsafe);
7709}
7710
28f4ec01
BS
7711/* Expand an expression EXP that calls a built-in function,
7712 with result going to TARGET if that's convenient
7713 (and in mode MODE if that's convenient).
7714 SUBTARGET may be used as the target for computing one of EXP's operands.
7715 IGNORE is nonzero if the value is to be ignored. */
7716
7717rtx
ef4bddc2 7718expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 7719 int ignore)
28f4ec01 7720{
2f503025 7721 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 7722 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 7723 int flags;
28f4ec01 7724
d51151b2
JJ
7725 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7726 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7727
bdea98ca
MO
7728 /* When ASan is enabled, we don't want to expand some memory/string
7729 builtins and rely on libsanitizer's hooks. This allows us to avoid
7730 redundant checks and be sure, that possible overflow will be detected
7731 by ASan. */
7732
4d732405 7733 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
bdea98ca
MO
7734 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7735 return expand_call (exp, target, ignore);
7736
28f4ec01
BS
7737 /* When not optimizing, generate calls to library functions for a certain
7738 set of builtins. */
d25225de 7739 if (!optimize
48ae6c13 7740 && !called_as_built_in (fndecl)
63bf9a90
JH
7741 && fcode != BUILT_IN_FORK
7742 && fcode != BUILT_IN_EXECL
7743 && fcode != BUILT_IN_EXECV
7744 && fcode != BUILT_IN_EXECLP
7745 && fcode != BUILT_IN_EXECLE
7746 && fcode != BUILT_IN_EXECVP
7747 && fcode != BUILT_IN_EXECVE
9e878cf1 7748 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 7749 && fcode != BUILT_IN_FREE)
d25225de 7750 return expand_call (exp, target, ignore);
28f4ec01 7751
0a45ec5c
RS
7752 /* The built-in function expanders test for target == const0_rtx
7753 to determine whether the function's result will be ignored. */
7754 if (ignore)
7755 target = const0_rtx;
7756
7757 /* If the result of a pure or const built-in function is ignored, and
7758 none of its arguments are volatile, we can avoid expanding the
7759 built-in call and just evaluate the arguments for side-effects. */
7760 if (target == const0_rtx
9e3920e9
JJ
7761 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7762 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
7763 {
7764 bool volatilep = false;
7765 tree arg;
5039610b 7766 call_expr_arg_iterator iter;
0a45ec5c 7767
5039610b
SL
7768 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7769 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
7770 {
7771 volatilep = true;
7772 break;
7773 }
7774
7775 if (! volatilep)
7776 {
5039610b
SL
7777 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7778 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
7779 return const0_rtx;
7780 }
7781 }
7782
28f4ec01
BS
7783 switch (fcode)
7784 {
ea6a6627 7785 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 7786 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
7787 case BUILT_IN_FABSD32:
7788 case BUILT_IN_FABSD64:
7789 case BUILT_IN_FABSD128:
5039610b 7790 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 7791 if (target)
c22cacf3 7792 return target;
075ec276
RS
7793 break;
7794
ea6a6627 7795 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 7796 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 7797 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
7798 if (target)
7799 return target;
7800 break;
7801
5906d013
EC
7802 /* Just do a normal library call if we were unable to fold
7803 the values. */
ea6a6627 7804 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 7805 break;
28f4ec01 7806
1b1562a5 7807 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 7808 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
7809 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7810 if (target)
7811 return target;
7812 break;
7813
eaee4464
UB
7814 CASE_FLT_FN (BUILT_IN_ILOGB):
7815 if (! flag_unsafe_math_optimizations)
7816 break;
903c723b
TC
7817 gcc_fallthrough ();
7818 CASE_FLT_FN (BUILT_IN_ISINF):
7819 CASE_FLT_FN (BUILT_IN_FINITE):
7820 case BUILT_IN_ISFINITE:
7821 case BUILT_IN_ISNORMAL:
4359dc2a 7822 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
7823 if (target)
7824 return target;
7825 break;
7826
6c32ee74 7827 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7828 CASE_FLT_FN (BUILT_IN_LCEIL):
7829 CASE_FLT_FN (BUILT_IN_LLCEIL):
7830 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 7831 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 7832 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 7833 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
7834 if (target)
7835 return target;
7836 break;
7837
6c32ee74 7838 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
7839 CASE_FLT_FN (BUILT_IN_LRINT):
7840 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 7841 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
7842 CASE_FLT_FN (BUILT_IN_LROUND):
7843 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 7844 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
7845 if (target)
7846 return target;
7847 break;
7848
ea6a6627 7849 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 7850 target = expand_builtin_powi (exp, target);
17684d46
RG
7851 if (target)
7852 return target;
7853 break;
7854
75c7c595 7855 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 7856 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
7857 gcc_assert (target);
7858 return target;
7859
ea6a6627
VR
7860 CASE_FLT_FN (BUILT_IN_SIN):
7861 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
7862 if (! flag_unsafe_math_optimizations)
7863 break;
7864 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7865 if (target)
7866 return target;
7867 break;
7868
403e54f0
RG
7869 CASE_FLT_FN (BUILT_IN_SINCOS):
7870 if (! flag_unsafe_math_optimizations)
7871 break;
7872 target = expand_builtin_sincos (exp);
7873 if (target)
7874 return target;
7875 break;
7876
28f4ec01
BS
7877 case BUILT_IN_APPLY_ARGS:
7878 return expand_builtin_apply_args ();
7879
7880 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7881 FUNCTION with a copy of the parameters described by
7882 ARGUMENTS, and ARGSIZE. It returns a block of memory
7883 allocated on the stack into which is stored all the registers
7884 that might possibly be used for returning the result of a
7885 function. ARGUMENTS is the value returned by
7886 __builtin_apply_args. ARGSIZE is the number of bytes of
7887 arguments that must be copied. ??? How should this value be
7888 computed? We'll also need a safe worst case value for varargs
7889 functions. */
7890 case BUILT_IN_APPLY:
5039610b 7891 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 7892 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 7893 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 7894 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
7895 return const0_rtx;
7896 else
7897 {
28f4ec01
BS
7898 rtx ops[3];
7899
5039610b
SL
7900 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7901 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7902 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
7903
7904 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7905 }
7906
7907 /* __builtin_return (RESULT) causes the function to return the
7908 value described by RESULT. RESULT is address of the block of
7909 memory returned by __builtin_apply. */
7910 case BUILT_IN_RETURN:
5039610b
SL
7911 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7912 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
7913 return const0_rtx;
7914
7915 case BUILT_IN_SAVEREGS:
d3707adb 7916 return expand_builtin_saveregs ();
28f4ec01 7917
6ef5231b
JJ
7918 case BUILT_IN_VA_ARG_PACK:
7919 /* All valid uses of __builtin_va_arg_pack () are removed during
7920 inlining. */
c94ed7a1 7921 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
7922 return const0_rtx;
7923
ab0e176c
JJ
7924 case BUILT_IN_VA_ARG_PACK_LEN:
7925 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7926 inlining. */
c94ed7a1 7927 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
7928 return const0_rtx;
7929
28f4ec01
BS
7930 /* Return the address of the first anonymous stack arg. */
7931 case BUILT_IN_NEXT_ARG:
5039610b 7932 if (fold_builtin_next_arg (exp, false))
c22cacf3 7933 return const0_rtx;
8870e212 7934 return expand_builtin_next_arg ();
28f4ec01 7935
677feb77
DD
7936 case BUILT_IN_CLEAR_CACHE:
7937 target = expand_builtin___clear_cache (exp);
7938 if (target)
7939 return target;
7940 break;
7941
28f4ec01 7942 case BUILT_IN_CLASSIFY_TYPE:
5039610b 7943 return expand_builtin_classify_type (exp);
28f4ec01
BS
7944
7945 case BUILT_IN_CONSTANT_P:
6de9cd9a 7946 return const0_rtx;
28f4ec01
BS
7947
7948 case BUILT_IN_FRAME_ADDRESS:
7949 case BUILT_IN_RETURN_ADDRESS:
5039610b 7950 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
7951
7952 /* Returns the address of the area where the structure is returned.
7953 0 otherwise. */
7954 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 7955 if (call_expr_nargs (exp) != 0
ca7fd9cd 7956 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 7957 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 7958 return const0_rtx;
28f4ec01 7959 else
ca7fd9cd 7960 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 7961
9e878cf1 7962 CASE_BUILT_IN_ALLOCA:
b7e52782 7963 target = expand_builtin_alloca (exp);
28f4ec01
BS
7964 if (target)
7965 return target;
7966 break;
7967
e3174bdf
MO
7968 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7969 return expand_asan_emit_allocas_unpoison (exp);
7970
6de9cd9a
DN
7971 case BUILT_IN_STACK_SAVE:
7972 return expand_stack_save ();
7973
7974 case BUILT_IN_STACK_RESTORE:
5039610b 7975 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
7976 return const0_rtx;
7977
ac868f29 7978 case BUILT_IN_BSWAP16:
167fa32c
EC
7979 case BUILT_IN_BSWAP32:
7980 case BUILT_IN_BSWAP64:
ac868f29 7981 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
7982 if (target)
7983 return target;
7984 break;
7985
ea6a6627 7986 CASE_INT_FN (BUILT_IN_FFS):
5039610b 7987 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7988 subtarget, ffs_optab);
2928cd7a
RH
7989 if (target)
7990 return target;
7991 break;
7992
ea6a6627 7993 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 7994 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7995 subtarget, clz_optab);
2928cd7a
RH
7996 if (target)
7997 return target;
7998 break;
7999
ea6a6627 8000 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 8001 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8002 subtarget, ctz_optab);
2928cd7a
RH
8003 if (target)
8004 return target;
8005 break;
8006
3801c801 8007 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
8008 target = expand_builtin_unop (target_mode, exp, target,
8009 subtarget, clrsb_optab);
8010 if (target)
8011 return target;
8012 break;
8013
ea6a6627 8014 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 8015 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8016 subtarget, popcount_optab);
2928cd7a
RH
8017 if (target)
8018 return target;
8019 break;
8020
ea6a6627 8021 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 8022 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8023 subtarget, parity_optab);
28f4ec01
BS
8024 if (target)
8025 return target;
8026 break;
8027
8028 case BUILT_IN_STRLEN:
5039610b 8029 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
8030 if (target)
8031 return target;
8032 break;
8033
781ff3d8
MS
8034 case BUILT_IN_STRNLEN:
8035 target = expand_builtin_strnlen (exp, target, target_mode);
8036 if (target)
8037 return target;
8038 break;
8039
ee92e7ba 8040 case BUILT_IN_STRCAT:
b5338fb3 8041 target = expand_builtin_strcat (exp);
ee92e7ba
MS
8042 if (target)
8043 return target;
8044 break;
8045
b5338fb3
MS
8046 case BUILT_IN_GETTEXT:
8047 case BUILT_IN_PUTS:
8048 case BUILT_IN_PUTS_UNLOCKED:
8049 case BUILT_IN_STRDUP:
8050 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8051 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8052 break;
8053
8054 case BUILT_IN_INDEX:
8055 case BUILT_IN_RINDEX:
8056 case BUILT_IN_STRCHR:
8057 case BUILT_IN_STRRCHR:
8058 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8059 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8060 break;
8061
8062 case BUILT_IN_FPUTS:
8063 case BUILT_IN_FPUTS_UNLOCKED:
8064 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8065 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8066 break;
8067
8068 case BUILT_IN_STRNDUP:
8069 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8070 check_nul_terminated_array (exp,
8071 CALL_EXPR_ARG (exp, 0),
8072 CALL_EXPR_ARG (exp, 1));
8073 break;
8074
8075 case BUILT_IN_STRCASECMP:
8076 case BUILT_IN_STRSTR:
8077 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8078 {
8079 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8080 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8081 }
8082 break;
8083
28f4ec01 8084 case BUILT_IN_STRCPY:
44e10129 8085 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
8086 if (target)
8087 return target;
8088 break;
8d51ecf8 8089
ee92e7ba
MS
8090 case BUILT_IN_STRNCAT:
8091 target = expand_builtin_strncat (exp, target);
8092 if (target)
8093 return target;
8094 break;
8095
da9e9f08 8096 case BUILT_IN_STRNCPY:
44e10129 8097 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
8098 if (target)
8099 return target;
8100 break;
8d51ecf8 8101
9cb65f92 8102 case BUILT_IN_STPCPY:
609ae0e2 8103 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
8104 if (target)
8105 return target;
8106 break;
8107
e50d56a5
MS
8108 case BUILT_IN_STPNCPY:
8109 target = expand_builtin_stpncpy (exp, target);
8110 if (target)
8111 return target;
8112 break;
8113
d9c5a8b9
MS
8114 case BUILT_IN_MEMCHR:
8115 target = expand_builtin_memchr (exp, target);
8116 if (target)
8117 return target;
8118 break;
8119
28f4ec01 8120 case BUILT_IN_MEMCPY:
44e10129 8121 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
8122 if (target)
8123 return target;
8124 break;
8125
e50d56a5
MS
8126 case BUILT_IN_MEMMOVE:
8127 target = expand_builtin_memmove (exp, target);
8128 if (target)
8129 return target;
8130 break;
8131
9cb65f92 8132 case BUILT_IN_MEMPCPY:
671a00ee 8133 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
8134 if (target)
8135 return target;
8136 break;
8137
8138 case BUILT_IN_MEMSET:
5039610b 8139 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
8140 if (target)
8141 return target;
8142 break;
8143
e3a709be 8144 case BUILT_IN_BZERO:
8148fe65 8145 target = expand_builtin_bzero (exp);
e3a709be
KG
8146 if (target)
8147 return target;
8148 break;
8149
10a0e2a9 8150 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
8151 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
8152 when changing it to a strcmp call. */
8153 case BUILT_IN_STRCMP_EQ:
8154 target = expand_builtin_memcmp (exp, target, true);
8155 if (target)
8156 return target;
8157
8158 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 8159 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8160 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8161
8162 /* Delete the last parameter. */
8163 unsigned int i;
8164 vec<tree, va_gc> *arg_vec;
8165 vec_alloc (arg_vec, 2);
8166 for (i = 0; i < 2; i++)
8167 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8168 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8169 /* FALLTHROUGH */
8170
28f4ec01 8171 case BUILT_IN_STRCMP:
44e10129 8172 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
8173 if (target)
8174 return target;
8175 break;
8176
8b0b334a
QZ
8177 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8178 back to a BUILT_IN_STRNCMP. */
8179 case BUILT_IN_STRNCMP_EQ:
8180 target = expand_builtin_memcmp (exp, target, true);
8181 if (target)
8182 return target;
8183
8184 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 8185 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8186 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8187 /* FALLTHROUGH */
8188
da9e9f08
KG
8189 case BUILT_IN_STRNCMP:
8190 target = expand_builtin_strncmp (exp, target, mode);
8191 if (target)
8192 return target;
8193 break;
8194
4b2a62db 8195 case BUILT_IN_BCMP:
28f4ec01 8196 case BUILT_IN_MEMCMP:
36b85e43
BS
8197 case BUILT_IN_MEMCMP_EQ:
8198 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
8199 if (target)
8200 return target;
36b85e43
BS
8201 if (fcode == BUILT_IN_MEMCMP_EQ)
8202 {
8203 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8204 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8205 }
28f4ec01 8206 break;
28f4ec01
BS
8207
8208 case BUILT_IN_SETJMP:
903c723b 8209 /* This should have been lowered to the builtins below. */
4f6c2131
EB
8210 gcc_unreachable ();
8211
8212 case BUILT_IN_SETJMP_SETUP:
8213 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8214 and the receiver label. */
5039610b 8215 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 8216 {
5039610b 8217 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 8218 VOIDmode, EXPAND_NORMAL);
5039610b 8219 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 8220 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8221
8222 /* This is copied from the handling of non-local gotos. */
8223 expand_builtin_setjmp_setup (buf_addr, label_r);
8224 nonlocal_goto_handler_labels
b5241a5a 8225 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
8226 nonlocal_goto_handler_labels);
8227 /* ??? Do not let expand_label treat us as such since we would
8228 not want to be both on the list of non-local labels and on
8229 the list of forced labels. */
8230 FORCED_LABEL (label) = 0;
8231 return const0_rtx;
8232 }
8233 break;
8234
4f6c2131
EB
8235 case BUILT_IN_SETJMP_RECEIVER:
8236 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 8237 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 8238 {
5039610b 8239 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 8240 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8241
8242 expand_builtin_setjmp_receiver (label_r);
8243 return const0_rtx;
8244 }
250d07b6 8245 break;
28f4ec01
BS
8246
8247 /* __builtin_longjmp is passed a pointer to an array of five words.
8248 It's similar to the C library longjmp function but works with
8249 __builtin_setjmp above. */
8250 case BUILT_IN_LONGJMP:
5039610b 8251 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 8252 {
5039610b 8253 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 8254 VOIDmode, EXPAND_NORMAL);
5039610b 8255 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
8256
8257 if (value != const1_rtx)
8258 {
9e637a26 8259 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
8260 return const0_rtx;
8261 }
8262
8263 expand_builtin_longjmp (buf_addr, value);
8264 return const0_rtx;
8265 }
4f6c2131 8266 break;
28f4ec01 8267
6de9cd9a 8268 case BUILT_IN_NONLOCAL_GOTO:
5039610b 8269 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
8270 if (target)
8271 return target;
8272 break;
8273
2b92e7f5
RK
8274 /* This updates the setjmp buffer that is its argument with the value
8275 of the current stack pointer. */
8276 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 8277 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
8278 {
8279 rtx buf_addr
5039610b 8280 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
8281
8282 expand_builtin_update_setjmp_buf (buf_addr);
8283 return const0_rtx;
8284 }
8285 break;
8286
28f4ec01 8287 case BUILT_IN_TRAP:
9602f5a0 8288 expand_builtin_trap ();
28f4ec01
BS
8289 return const0_rtx;
8290
468059bc
DD
8291 case BUILT_IN_UNREACHABLE:
8292 expand_builtin_unreachable ();
8293 return const0_rtx;
8294
ea6a6627 8295 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
8296 case BUILT_IN_SIGNBITD32:
8297 case BUILT_IN_SIGNBITD64:
8298 case BUILT_IN_SIGNBITD128:
ef79730c
RS
8299 target = expand_builtin_signbit (exp, target);
8300 if (target)
8301 return target;
8302 break;
8303
28f4ec01
BS
8304 /* Various hooks for the DWARF 2 __throw routine. */
8305 case BUILT_IN_UNWIND_INIT:
8306 expand_builtin_unwind_init ();
8307 return const0_rtx;
8308 case BUILT_IN_DWARF_CFA:
8309 return virtual_cfa_rtx;
8310#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
8311 case BUILT_IN_DWARF_SP_COLUMN:
8312 return expand_builtin_dwarf_sp_column ();
d9d5c9de 8313 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 8314 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 8315 return const0_rtx;
28f4ec01
BS
8316#endif
8317 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 8318 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8319 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 8320 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8321 case BUILT_IN_EH_RETURN:
5039610b
SL
8322 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8323 CALL_EXPR_ARG (exp, 1));
28f4ec01 8324 return const0_rtx;
52a11cbf 8325 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 8326 return expand_builtin_eh_return_data_regno (exp);
c76362b4 8327 case BUILT_IN_EXTEND_POINTER:
5039610b 8328 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
8329 case BUILT_IN_EH_POINTER:
8330 return expand_builtin_eh_pointer (exp);
8331 case BUILT_IN_EH_FILTER:
8332 return expand_builtin_eh_filter (exp);
8333 case BUILT_IN_EH_COPY_VALUES:
8334 return expand_builtin_eh_copy_values (exp);
c76362b4 8335
6c535c69 8336 case BUILT_IN_VA_START:
5039610b 8337 return expand_builtin_va_start (exp);
d3707adb 8338 case BUILT_IN_VA_END:
5039610b 8339 return expand_builtin_va_end (exp);
d3707adb 8340 case BUILT_IN_VA_COPY:
5039610b 8341 return expand_builtin_va_copy (exp);
994a57cd 8342 case BUILT_IN_EXPECT:
5039610b 8343 return expand_builtin_expect (exp, target);
1e9168b2
ML
8344 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8345 return expand_builtin_expect_with_probability (exp, target);
45d439ac
JJ
8346 case BUILT_IN_ASSUME_ALIGNED:
8347 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 8348 case BUILT_IN_PREFETCH:
5039610b 8349 expand_builtin_prefetch (exp);
a9ccbb60
JJ
8350 return const0_rtx;
8351
6de9cd9a 8352 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
8353 return expand_builtin_init_trampoline (exp, true);
8354 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8355 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 8356 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 8357 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 8358
4c640e26
EB
8359 case BUILT_IN_INIT_DESCRIPTOR:
8360 return expand_builtin_init_descriptor (exp);
8361 case BUILT_IN_ADJUST_DESCRIPTOR:
8362 return expand_builtin_adjust_descriptor (exp);
8363
d1c38823
ZD
8364 case BUILT_IN_FORK:
8365 case BUILT_IN_EXECL:
8366 case BUILT_IN_EXECV:
8367 case BUILT_IN_EXECLP:
8368 case BUILT_IN_EXECLE:
8369 case BUILT_IN_EXECVP:
8370 case BUILT_IN_EXECVE:
5039610b 8371 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
8372 if (target)
8373 return target;
8374 break;
28f4ec01 8375
e0a8ecf2
AM
8376 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8377 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8378 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8379 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8380 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8381 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 8382 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
8383 if (target)
8384 return target;
8385 break;
8386
e0a8ecf2
AM
8387 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8388 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8389 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8390 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8391 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8392 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 8393 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
8394 if (target)
8395 return target;
8396 break;
8397
e0a8ecf2
AM
8398 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8399 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8400 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8401 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8402 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8403 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 8404 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
8405 if (target)
8406 return target;
8407 break;
8408
e0a8ecf2
AM
8409 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8410 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8411 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8412 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8413 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8414 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 8415 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
8416 if (target)
8417 return target;
8418 break;
8419
e0a8ecf2
AM
8420 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8421 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8422 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8423 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8424 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8425 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 8426 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
8427 if (target)
8428 return target;
8429 break;
8430
e0a8ecf2
AM
8431 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8432 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8433 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8434 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8435 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8436 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 8437 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
8438 if (target)
8439 return target;
8440 break;
8441
e0a8ecf2
AM
8442 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8443 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8444 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8445 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8446 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8447 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 8448 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
8449 if (target)
8450 return target;
8451 break;
8452
e0a8ecf2
AM
8453 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8454 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8455 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8456 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8457 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8458 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 8459 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
8460 if (target)
8461 return target;
8462 break;
8463
e0a8ecf2
AM
8464 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8465 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8466 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8467 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8468 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8469 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 8470 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
8471 if (target)
8472 return target;
8473 break;
8474
e0a8ecf2
AM
8475 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8476 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8477 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8478 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8479 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8480 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 8481 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
8482 if (target)
8483 return target;
8484 break;
8485
e0a8ecf2
AM
8486 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8487 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8488 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8489 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8490 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8491 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 8492 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
8493 if (target)
8494 return target;
8495 break;
8496
e0a8ecf2
AM
8497 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8498 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8499 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8500 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8501 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8502 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 8503 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
8504 if (target)
8505 return target;
8506 break;
8507
e0a8ecf2
AM
8508 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8509 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8510 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8511 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8512 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
8513 if (mode == VOIDmode)
8514 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
8515 if (!target || !register_operand (target, mode))
8516 target = gen_reg_rtx (mode);
02ee605c 8517
e0a8ecf2
AM
8518 mode = get_builtin_sync_mode
8519 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 8520 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
8521 if (target)
8522 return target;
8523 break;
8524
e0a8ecf2
AM
8525 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8526 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8527 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8528 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8529 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8530 mode = get_builtin_sync_mode
8531 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 8532 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
8533 if (target)
8534 return target;
8535 break;
8536
e0a8ecf2
AM
8537 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8538 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8539 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8540 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8541 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8543 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
8544 if (target)
8545 return target;
8546 break;
8547
e0a8ecf2
AM
8548 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8549 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8550 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8551 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8552 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8554 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
8555 return const0_rtx;
8556
e0a8ecf2
AM
8557 case BUILT_IN_SYNC_SYNCHRONIZE:
8558 expand_builtin_sync_synchronize ();
48ae6c13
RH
8559 return const0_rtx;
8560
86951993
AM
8561 case BUILT_IN_ATOMIC_EXCHANGE_1:
8562 case BUILT_IN_ATOMIC_EXCHANGE_2:
8563 case BUILT_IN_ATOMIC_EXCHANGE_4:
8564 case BUILT_IN_ATOMIC_EXCHANGE_8:
8565 case BUILT_IN_ATOMIC_EXCHANGE_16:
8566 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8567 target = expand_builtin_atomic_exchange (mode, exp, target);
8568 if (target)
8569 return target;
8570 break;
8571
8572 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8573 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8574 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8575 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8576 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
8577 {
8578 unsigned int nargs, z;
9771b263 8579 vec<tree, va_gc> *vec;
e351ae85
AM
8580
8581 mode =
8582 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8583 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8584 if (target)
8585 return target;
8586
8587 /* If this is turned into an external library call, the weak parameter
8588 must be dropped to match the expected parameter list. */
8589 nargs = call_expr_nargs (exp);
9771b263 8590 vec_alloc (vec, nargs - 1);
e351ae85 8591 for (z = 0; z < 3; z++)
9771b263 8592 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8593 /* Skip the boolean weak parameter. */
8594 for (z = 4; z < 6; z++)
9771b263 8595 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8596 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8597 break;
8598 }
86951993
AM
8599
8600 case BUILT_IN_ATOMIC_LOAD_1:
8601 case BUILT_IN_ATOMIC_LOAD_2:
8602 case BUILT_IN_ATOMIC_LOAD_4:
8603 case BUILT_IN_ATOMIC_LOAD_8:
8604 case BUILT_IN_ATOMIC_LOAD_16:
8605 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8606 target = expand_builtin_atomic_load (mode, exp, target);
8607 if (target)
8608 return target;
8609 break;
8610
8611 case BUILT_IN_ATOMIC_STORE_1:
8612 case BUILT_IN_ATOMIC_STORE_2:
8613 case BUILT_IN_ATOMIC_STORE_4:
8614 case BUILT_IN_ATOMIC_STORE_8:
8615 case BUILT_IN_ATOMIC_STORE_16:
8616 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8617 target = expand_builtin_atomic_store (mode, exp);
8618 if (target)
8619 return const0_rtx;
8620 break;
8621
8622 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8623 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8624 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8625 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8626 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8627 {
8628 enum built_in_function lib;
8629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8630 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8631 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8632 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8633 ignore, lib);
8634 if (target)
8635 return target;
8636 break;
8637 }
8638 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8639 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8640 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8641 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8642 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8643 {
8644 enum built_in_function lib;
8645 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8646 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8647 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8648 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8649 ignore, lib);
8650 if (target)
8651 return target;
8652 break;
8653 }
8654 case BUILT_IN_ATOMIC_AND_FETCH_1:
8655 case BUILT_IN_ATOMIC_AND_FETCH_2:
8656 case BUILT_IN_ATOMIC_AND_FETCH_4:
8657 case BUILT_IN_ATOMIC_AND_FETCH_8:
8658 case BUILT_IN_ATOMIC_AND_FETCH_16:
8659 {
8660 enum built_in_function lib;
8661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8662 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8663 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8664 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8665 ignore, lib);
8666 if (target)
8667 return target;
8668 break;
8669 }
8670 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8671 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8672 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8673 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8674 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8675 {
8676 enum built_in_function lib;
8677 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8678 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8679 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8680 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8681 ignore, lib);
8682 if (target)
8683 return target;
8684 break;
8685 }
8686 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8687 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8688 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8689 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8690 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8691 {
8692 enum built_in_function lib;
8693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8694 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8695 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8696 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8697 ignore, lib);
8698 if (target)
8699 return target;
8700 break;
8701 }
8702 case BUILT_IN_ATOMIC_OR_FETCH_1:
8703 case BUILT_IN_ATOMIC_OR_FETCH_2:
8704 case BUILT_IN_ATOMIC_OR_FETCH_4:
8705 case BUILT_IN_ATOMIC_OR_FETCH_8:
8706 case BUILT_IN_ATOMIC_OR_FETCH_16:
8707 {
8708 enum built_in_function lib;
8709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8710 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8711 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8712 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8713 ignore, lib);
8714 if (target)
8715 return target;
8716 break;
8717 }
8718 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8719 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8720 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8721 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8722 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8723 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8724 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8725 ignore, BUILT_IN_NONE);
8726 if (target)
8727 return target;
8728 break;
8729
8730 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8731 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8732 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8733 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8734 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8736 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8737 ignore, BUILT_IN_NONE);
8738 if (target)
8739 return target;
8740 break;
8741
8742 case BUILT_IN_ATOMIC_FETCH_AND_1:
8743 case BUILT_IN_ATOMIC_FETCH_AND_2:
8744 case BUILT_IN_ATOMIC_FETCH_AND_4:
8745 case BUILT_IN_ATOMIC_FETCH_AND_8:
8746 case BUILT_IN_ATOMIC_FETCH_AND_16:
8747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8748 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8749 ignore, BUILT_IN_NONE);
8750 if (target)
8751 return target;
8752 break;
8753
8754 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8755 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8756 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8757 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8758 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8760 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8761 ignore, BUILT_IN_NONE);
8762 if (target)
8763 return target;
8764 break;
8765
8766 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8767 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8768 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8769 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8770 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8771 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8772 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8773 ignore, BUILT_IN_NONE);
8774 if (target)
8775 return target;
8776 break;
8777
8778 case BUILT_IN_ATOMIC_FETCH_OR_1:
8779 case BUILT_IN_ATOMIC_FETCH_OR_2:
8780 case BUILT_IN_ATOMIC_FETCH_OR_4:
8781 case BUILT_IN_ATOMIC_FETCH_OR_8:
8782 case BUILT_IN_ATOMIC_FETCH_OR_16:
8783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8784 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8785 ignore, BUILT_IN_NONE);
8786 if (target)
8787 return target;
8788 break;
d660c35e
AM
8789
8790 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 8791 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
8792
8793 case BUILT_IN_ATOMIC_CLEAR:
8794 return expand_builtin_atomic_clear (exp);
86951993
AM
8795
8796 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8797 return expand_builtin_atomic_always_lock_free (exp);
8798
8799 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8800 target = expand_builtin_atomic_is_lock_free (exp);
8801 if (target)
8802 return target;
8803 break;
8804
8805 case BUILT_IN_ATOMIC_THREAD_FENCE:
8806 expand_builtin_atomic_thread_fence (exp);
8807 return const0_rtx;
8808
8809 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8810 expand_builtin_atomic_signal_fence (exp);
8811 return const0_rtx;
8812
10a0d495
JJ
8813 case BUILT_IN_OBJECT_SIZE:
8814 return expand_builtin_object_size (exp);
8815
8816 case BUILT_IN_MEMCPY_CHK:
8817 case BUILT_IN_MEMPCPY_CHK:
8818 case BUILT_IN_MEMMOVE_CHK:
8819 case BUILT_IN_MEMSET_CHK:
8820 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8821 if (target)
8822 return target;
8823 break;
8824
8825 case BUILT_IN_STRCPY_CHK:
8826 case BUILT_IN_STPCPY_CHK:
8827 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 8828 case BUILT_IN_STPNCPY_CHK:
10a0d495 8829 case BUILT_IN_STRCAT_CHK:
1c2fc017 8830 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
8831 case BUILT_IN_SNPRINTF_CHK:
8832 case BUILT_IN_VSNPRINTF_CHK:
8833 maybe_emit_chk_warning (exp, fcode);
8834 break;
8835
8836 case BUILT_IN_SPRINTF_CHK:
8837 case BUILT_IN_VSPRINTF_CHK:
8838 maybe_emit_sprintf_chk_warning (exp, fcode);
8839 break;
8840
f9555f40 8841 case BUILT_IN_FREE:
a3a704a4
MH
8842 if (warn_free_nonheap_object)
8843 maybe_emit_free_warning (exp);
f9555f40
JJ
8844 break;
8845
f959607b
CLT
8846 case BUILT_IN_THREAD_POINTER:
8847 return expand_builtin_thread_pointer (exp, target);
8848
8849 case BUILT_IN_SET_THREAD_POINTER:
8850 expand_builtin_set_thread_pointer (exp);
8851 return const0_rtx;
8852
41dbbb37 8853 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
8854 /* Do library call, if we failed to expand the builtin when
8855 folding. */
41dbbb37
TS
8856 break;
8857
1f62d637
TV
8858 case BUILT_IN_GOACC_PARLEVEL_ID:
8859 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8860 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8861
425fc685
RE
8862 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8863 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8864
8865 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8866 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8867 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8868 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8869 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8870 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8871 return expand_speculation_safe_value (mode, exp, target, ignore);
8872
e62f4abc 8873 default: /* just do library call, if unknown builtin */
84b8b0e0 8874 break;
28f4ec01
BS
8875 }
8876
8877 /* The switch statement above can drop through to cause the function
8878 to be called normally. */
8879 return expand_call (exp, target, ignore);
8880}
b0b3afb2 8881
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  /* Only "normal" (i.e. non-machine-specific, non-frontend) built-ins
     are recognized here.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, checking that each argument's type class (float,
     complex float, pointer, integral) matches the prototype.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Reject the call if it supplies more arguments than the
	     prototype declares.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few arguments for the prototype.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	/* Parameter of a type class we don't handle: be conservative.  */
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
8949
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node when ARG is known
   constant, integer_zero_node when it is known non-foldable, and
   NULL_TREE when the answer must wait for later optimization.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  /* The address of a string literal (or of its first element via a
     zero-index ARRAY_REF) also counts as constant.  */
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Can't tell yet: leave the call in place for later passes.  */
  return NULL_TREE;
}
8993
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* Choose which built-in to emit based on whether a probability was
     supplied; a NULL_TREE probability selects plain __builtin_expect.  */
  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  /* Pull the parameter and return types off the chosen declaration so
     the arguments can be converted to exactly what it expects.  */
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  /* Compare the call's result against zero to turn it back into a
     truthvalue for the caller.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
9025
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  /* For (A && B) or (A || B), push the expectation down onto each
     operand as a separate __builtin_expect call.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; make sure it is evaluated once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component and array references down to the underlying
	 declaration being addressed.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address is not a compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
9098
5039610b 9099/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 9100
ad82abb8 9101static tree
5039610b 9102fold_builtin_classify_type (tree arg)
ad82abb8 9103{
5039610b 9104 if (arg == 0)
45a2c477 9105 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 9106
45a2c477 9107 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
9108}
9109
/* Fold a call to __builtin_strlen with argument ARG.  TYPE is the
   strlen call's result type.  Returns the known length folded to TYPE,
   or NULL_TREE when the length cannot be determined; may also emit a
   missing-terminator warning as a side effect.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      c_strlen_data lendata = { };
      tree len = c_strlen (arg, 0, &lendata);

      if (len)
	return fold_convert_loc (loc, type, len);

      /* The length wasn't computable; retry with c_strlen's other mode
	 (second argument 1), presumably to identify an unterminated
	 array declaration in LENDATA.DECL — confirm against c_strlen.  */
      if (!lendata.decl)
	c_strlen (arg, 1, &lendata);

      if (lendata.decl)
	{
	  /* Point the diagnostic at the argument when it has a
	     location, falling back to the current input location.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
	}

      return NULL_TREE;
    }
}
9140
/* Fold a call to __builtin_inf or __builtin_huge_val.  TYPE is the
   floating type of the result; WARN is nonzero for the inf variants,
   which must diagnose targets whose format lacks infinities.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
9161
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle, ARG1 and ARG2 the pointers receiving the sine and cosine.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* No constant folding happened; emit an actual cexpi call, which
	 requires C99 complex math support in the target libc.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* CALL's value is used twice below (imag and real parts), so
	 arrange for it to be evaluated only once.  */
      call = builtin_save_expr (call);
    }

  /* Store the two halves: cexpi(x) = cos(x) + i*sin(x), so the
     imaginary part goes to *ARG1 (sin) and the real part to *ARG2
     (cos).  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
9211
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the byte count.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  The omit_* helpers
     keep any side effects of the dropped operands.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Build a const-qualified unsigned char type and a pointer to it
	 so the single-byte loads read through the right type.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
9257
5039610b 9258/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
9259
9260static tree
db3927fb 9261fold_builtin_isascii (location_t loc, tree arg)
df0785d6 9262{
5039610b
SL
9263 if (!validate_arg (arg, INTEGER_TYPE))
9264 return NULL_TREE;
df0785d6
KG
9265 else
9266 {
9267 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 9268 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9269 build_int_cst (integer_type_node,
6728ee79 9270 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 9271 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 9272 arg, integer_zero_node);
df0785d6
KG
9273 }
9274}
9275
5039610b 9276/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
9277
9278static tree
db3927fb 9279fold_builtin_toascii (location_t loc, tree arg)
df0785d6 9280{
5039610b
SL
9281 if (!validate_arg (arg, INTEGER_TYPE))
9282 return NULL_TREE;
b8698a0f 9283
5039610b 9284 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 9285 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9286 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
9287}
9288
/* Fold a call to builtin isdigit with argument ARG.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      /* A zero result presumably means the target charset conversion
	 failed; give up in that case — confirm against lang_hooks.  */
      if (target_digit0 == 0)
	return NULL_TREE;

      /* The unsigned subtraction makes one comparison cover both the
	 "below '0'" (wraps to a huge value) and "above '9'" cases.  */
      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
ef79730c 9314
5039610b 9315/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
9316
9317static tree
db3927fb 9318fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 9319{
5039610b
SL
9320 if (!validate_arg (arg, REAL_TYPE))
9321 return NULL_TREE;
9655d83b 9322
db3927fb 9323 arg = fold_convert_loc (loc, type, arg);
db3927fb 9324 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9325}
9326
5039610b 9327/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
9328
9329static tree
db3927fb 9330fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 9331{
5039610b
SL
9332 if (!validate_arg (arg, INTEGER_TYPE))
9333 return NULL_TREE;
9655d83b 9334
db3927fb 9335 arg = fold_convert_loc (loc, type, arg);
db3927fb 9336 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9337}
9338
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  TYPE is the real
   result type.  Return NULL_TREE when ARG is not a real-component
   complex or no atan2 built-in exists for TYPE.  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  /* ARG is read twice (real and imaginary parts); save it so
	     it is evaluated only once.  */
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  /* atan2 takes (y, x), i.e. imaginary part first.  */
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9360
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the constant value to decompose, ARG1 the int* that receives the
   exponent, RETTYPE the return type.  Return NULL_TREE when ARG0 is
   not a foldable constant or ARG1 has the wrong pointee type.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold real constants that did not overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9416
3d577eaf
KG
9417/* Fold a call to builtin modf. */
9418
9419static tree
db3927fb 9420fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
9421{
9422 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9423 return NULL_TREE;
b8698a0f 9424
3d577eaf 9425 STRIP_NOPS (arg0);
b8698a0f 9426
3d577eaf
KG
9427 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9428 return NULL_TREE;
b8698a0f 9429
db3927fb 9430 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
9431
9432 /* Proceed if a valid pointer type was passed in. */
9433 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9434 {
9435 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9436 REAL_VALUE_TYPE trunc, frac;
9437
9438 switch (value->cl)
9439 {
9440 case rvc_nan:
9441 case rvc_zero:
9442 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9443 trunc = frac = *value;
9444 break;
9445 case rvc_inf:
9446 /* For +-Inf, return (*arg1 = arg0, +-0). */
9447 frac = dconst0;
9448 frac.sign = value->sign;
9449 trunc = *value;
9450 break;
9451 case rvc_normal:
9452 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9453 real_trunc (&trunc, VOIDmode, value);
9454 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9455 /* If the original number was negative and already
9456 integral, then the fractional part is -0.0. */
9457 if (value->sign && frac.cl == rvc_zero)
9458 frac.sign = value->sign;
9459 break;
9460 }
b8698a0f 9461
3d577eaf 9462 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 9463 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
9464 build_real (rettype, trunc));
9465 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 9466 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
9467 build_real (rettype, frac));
9468 }
b8698a0f 9469
3d577eaf
KG
9470 return NULL_TREE;
9471}
9472
903c723b
TC
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer a direct optab expansion when the target provides one.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Composite modes here are IBM extended (double-double); NaN/Inf
     classification then only depends on the high-order double.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* Build the mode's largest finite value as a string and parse
	   it back into R to use as the comparison bound.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is evaluated more than once below, so force it into a
	   SAVE_EXPR first; ORIG_ARG keeps the unconverted value for the
	   IBM-extended low-part test.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* RMAX is the largest finite value of MODE; RMIN is the
	   smallest normal value of the ORIGINAL mode (0x1p<emin-1>).  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    /* Reinterpret the double-double as a complex double to
	       access its high (real) and low (imag) halves.  */
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    /* ok_lo: the low double must not have the same sign as the
	       high double (i.e. it does not push the value below the
	       normal limit).  */
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9631
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  FNDECL supplies the result type;
   BUILTIN_INDEX selects which classification is being folded.  Returns
   the folded expression or NULL_TREE.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* If the mode has no infinities the answer is always 0; keep
	 ARG's side effects via omit_one_operand_loc.  */
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is used by both calls below, so evaluate it once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 by comparing against zero.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* Without NaNs and infinities every value is finite.  */
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9710
903c723b
TC
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All tests below are against fabs(arg); save it so it is evaluated
     only once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Built innermost-out: start with the zero/subnormal distinction.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p<emin-1> is the smallest normal value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only test for Inf/NaN when the mode honors them.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9782
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  /* NOTE(review): CMP_TYPE stays NULL_TREE when neither operand is a
     REAL_TYPE — presumably the front ends guarantee at least one real
     operand here; confirm against the callers in fold_builtin_2.  */
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered: without NaNs the result is always 0; otherwise
	 emit the UNORDERED_EXPR directly (no inversion needed).  */
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The codes passed in express the OPPOSITE of the desired result,
     so build the comparison and negate it.  */
  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
9831
1304953e
JJ
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Map the built-in to its tree code and internal function.  The
     *_OVERFLOW_P cases set OVF_ONLY and then fall through to pick up
     the same opcode/ifn as their storing counterparts.  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: compute the result and the overflow
	 flag at compile time.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Otherwise lower to the internal function, which returns a
	 complex value: real part = result, imaginary part = overflow
	 flag.  SAVE_EXPR ensures the call is evaluated only once.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  /* The _p variants only report overflow; drop the result but keep
     ARG2's side effects.  */
  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Store the arithmetic result through ARG2 and yield the flag.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
9941
b25aad5f
MS
9942/* Fold a call to __builtin_FILE to a constant string. */
9943
9944static inline tree
9945fold_builtin_FILE (location_t loc)
9946{
9947 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
9948 {
9949 /* The documentation says this builtin is equivalent to the preprocessor
9950 __FILE__ macro so it appears appropriate to use the same file prefix
9951 mappings. */
9952 fname = remap_macro_filename (fname);
b25aad5f 9953 return build_string_literal (strlen (fname) + 1, fname);
7365279f 9954 }
b25aad5f
MS
9955
9956 return build_string_literal (1, "");
9957}
9958
9959/* Fold a call to __builtin_FUNCTION to a constant string. */
9960
9961static inline tree
9962fold_builtin_FUNCTION ()
9963{
f76b4224
NS
9964 const char *name = "";
9965
b25aad5f 9966 if (current_function_decl)
f76b4224 9967 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 9968
f76b4224 9969 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
9970}
9971
9972/* Fold a call to __builtin_LINE to an integer constant. */
9973
9974static inline tree
9975fold_builtin_LINE (location_t loc, tree type)
9976{
9977 return build_int_cst (type, LOCATION_LINE (loc));
9978}
9979
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* __builtin_inf* and __builtin_huge_val* both fold to an infinity;
       they differ only in the flag passed to fold_builtin_inf
       (presumably whether to warn when the mode has no infinities —
       confirm against fold_builtin_inf).  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* Zero-argument classify_type: no expression to classify.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
d3147f64 10018
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try a pure constant fold of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex builtins fold directly to the corresponding tree codes
       once the argument is verified to be a real-complex type.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* Classification builtins: try the mode-based fold first, then
       the generic interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free(NULL) is a no-op; fold it away entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
b53fed56 10145
b5338fb3
MS
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try a pure constant fold of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The unordered comparison builtins pass the INVERSE tree codes;
       fold_builtin_unordered_cmp negates the built comparison.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10228
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try a pure constant fold of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    /* Three-argument __builtin_expect carries either an expected value
       (third arg NULL probability) or a probability.  */
    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All overflow-checking builtins share one folder, which keys off
       FCODE itself.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
b0b3afb2 10302
/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
		int nargs, bool)
{
  tree ret = NULL_TREE;

  /* Dispatch on the arity of the call; calls with more than three
     arguments are handled by the varargs folder.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      /* Wrap the folded result in a NOP_EXPR carrying the call's
	 location, presumably so later diagnostics and debug info refer
	 to the original call site -- NOTE(review): confirm intent.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
10340
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* Assemble the new arguments first, then the surviving tail of
	 the original argument list.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* Nothing to prepend: the surviving suffix of ARGS can be used
       in place without copying.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
5039610b 10368
0889e9bc
JJ
10369/* Return true if FNDECL shouldn't be folded right now.
10370 If a built-in function has an inline attribute always_inline
10371 wrapper, defer folding it after always_inline functions have
10372 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10373 might not be performed. */
10374
e7f9dae0 10375bool
0889e9bc
JJ
10376avoid_folding_inline_builtin (tree fndecl)
10377{
10378 return (DECL_DECLARED_INLINE_P (fndecl)
10379 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10380 && cfun
10381 && !cfun->always_inline_functions_inlined
10382 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10383}
10384
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* Defer folding of always_inline fortify wrappers, see the
	 function comment of avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook;
	 everything else goes through the generic arity dispatcher.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
b8698a0f 10428
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through an ADDR_EXPR of a built-in FUNCTION_DECL
     are candidates for folding here.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* Machine-dependent builtins go to the target hook; generic ones
	 to the arity dispatcher (no originating CALL_EXPR available,
	 hence the NULL_TREE expr argument).  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
10464
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  /* All the work is done by the va_list-taking helper; this wrapper
     only packages the "..." arguments.  */
  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
10483
10484/* Validate a single argument ARG against a tree code CODE representing
0dba7960 10485 a type. Return true when argument is valid. */
b8698a0f 10486
5039610b 10487static bool
0dba7960 10488validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
10489{
10490 if (!arg)
10491 return false;
10492 else if (code == POINTER_TYPE)
0dba7960 10493 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
10494 else if (code == INTEGER_TYPE)
10495 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 10496 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 10497}
019fa094 10498
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* tree_code is promoted to int when passed through "...", so
	 read it back as int and convert.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
10553
f6155fda
SS
10554/* Default target-specific builtin expander that does nothing. */
10555
10556rtx
4682ae04
AJ
10557default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10558 rtx target ATTRIBUTE_UNUSED,
10559 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 10560 machine_mode mode ATTRIBUTE_UNUSED,
4682ae04 10561 int ignore ATTRIBUTE_UNUSED)
f6155fda
SS
10562{
10563 return NULL_RTX;
10564}
34ee7f82 10565
/* Returns true is EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  /* Only the address of an object can name read-only data.  */
  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
6de9cd9a 10591
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* Don't fold if either argument isn't a provably NUL-terminated
     array (EXPR is the call, used for diagnostics).  */
  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  tree fn;
  const char *p1, *p2;

  /* The accept-set must be a known constant string to fold at all.  */
  p2 = c_getstr (s2);
  if (p2 == NULL)
    return NULL_TREE;

  p1 = c_getstr (s1);
  if (p1 != NULL)
    {
      /* Both strings constant: compute the result at compile time.  */
      const char *r = strpbrk (p1, p2);
      tree tem;

      if (r == NULL)
	return build_int_cst (TREE_TYPE (s1), 0);

      /* Return an offset into the constant string argument.  */
      tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
      return fold_convert_loc (loc, type, tem);
    }

  if (p2[0] == '\0')
    /* strpbrk(x, "") == NULL.
       Evaluate and ignore s1 in case it had side-effects.  */
    return omit_one_operand_loc (loc, type, integer_zero_node, s1);

  if (p2[1] != '\0')
    return NULL_TREE;  /* Really call strpbrk.  */

  fn = builtin_decl_implicit (BUILT_IN_STRCHR);
  if (!fn)
    return NULL_TREE;

  /* New argument list transforming strpbrk(s1, s2) to
     strchr(s1, s2[0]).  */
  return build_call_expr_loc (loc, fn, 2, s1,
			      build_int_cst (integer_type_node, p2[0]));
}
10659
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* Don't fold unless both arguments are provably NUL-terminated.  */
  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

  /* If either argument is "", return NULL_TREE.  */
  if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
    /* Evaluate and ignore both arguments in case either one has
       side-effects.  */
    return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				  s1, s2);
  return NULL_TREE;
}
10699
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* Don't fold unless both arguments are provably NUL-terminated.  */
  if (!check_nul_terminated_array (expr, s1)
      || !check_nul_terminated_array (expr, s2))
    return NULL_TREE;

  /* If the first argument is "", return NULL_TREE.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
	 side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
				   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }
  return NULL_TREE;
}
10754
5039610b 10755/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
10756 produced. False otherwise. This is done so that we don't output the error
10757 or warning twice or three times. */
726a989a 10758
2efcfa4e 10759bool
5039610b 10760fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
10761{
10762 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
10763 int nargs = call_expr_nargs (exp);
10764 tree arg;
34c88790
DS
10765 /* There is good chance the current input_location points inside the
10766 definition of the va_start macro (perhaps on the token for
10767 builtin) in a system header, so warnings will not be emitted.
10768 Use the location in real source code. */
620e594b 10769 location_t current_location =
34c88790
DS
10770 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10771 NULL);
6de9cd9a 10772
f38958e8 10773 if (!stdarg_p (fntype))
2efcfa4e 10774 {
a9c697b8 10775 error ("%<va_start%> used in function with fixed arguments");
2efcfa4e
AP
10776 return true;
10777 }
5039610b
SL
10778
10779 if (va_start_p)
8870e212 10780 {
5039610b
SL
10781 if (va_start_p && (nargs != 2))
10782 {
10783 error ("wrong number of arguments to function %<va_start%>");
10784 return true;
10785 }
10786 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
10787 }
10788 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10789 when we checked the arguments and if needed issued a warning. */
5039610b 10790 else
6de9cd9a 10791 {
5039610b
SL
10792 if (nargs == 0)
10793 {
10794 /* Evidently an out of date version of <stdarg.h>; can't validate
10795 va_start's second argument, but can still work as intended. */
34c88790 10796 warning_at (current_location,
b9c8da34
DS
10797 OPT_Wvarargs,
10798 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
10799 return true;
10800 }
10801 else if (nargs > 1)
c22cacf3 10802 {
5039610b 10803 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
10804 return true;
10805 }
5039610b
SL
10806 arg = CALL_EXPR_ARG (exp, 0);
10807 }
10808
4e3825db
MM
10809 if (TREE_CODE (arg) == SSA_NAME)
10810 arg = SSA_NAME_VAR (arg);
10811
5039610b 10812 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 10813 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
10814 the arguments and if needed issuing a warning. */
10815 if (!integer_zerop (arg))
10816 {
10817 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 10818
6de9cd9a
DN
10819 /* Strip off all nops for the sake of the comparison. This
10820 is not quite the same as STRIP_NOPS. It does more.
10821 We must also strip off INDIRECT_EXPR for C++ reference
10822 parameters. */
1043771b 10823 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
10824 || TREE_CODE (arg) == INDIRECT_REF)
10825 arg = TREE_OPERAND (arg, 0);
10826 if (arg != last_parm)
c22cacf3 10827 {
118f3b19
KH
10828 /* FIXME: Sometimes with the tree optimizers we can get the
10829 not the last argument even though the user used the last
10830 argument. We just warn and set the arg to be the last
10831 argument so that we will get wrong-code because of
10832 it. */
34c88790 10833 warning_at (current_location,
b9c8da34 10834 OPT_Wvarargs,
34c88790 10835 "second parameter of %<va_start%> not last named argument");
2efcfa4e 10836 }
2985f531
MLI
10837
10838 /* Undefined by C99 7.15.1.4p4 (va_start):
10839 "If the parameter parmN is declared with the register storage
10840 class, with a function or array type, or with a type that is
10841 not compatible with the type that results after application of
10842 the default argument promotions, the behavior is undefined."
10843 */
10844 else if (DECL_REGISTER (arg))
34c88790
DS
10845 {
10846 warning_at (current_location,
b9c8da34 10847 OPT_Wvarargs,
9c582551 10848 "undefined behavior when second parameter of "
34c88790
DS
10849 "%<va_start%> is declared with %<register%> storage");
10850 }
2985f531 10851
8870e212 10852 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
10853 optimizers are run and then avoid keeping it in the tree,
10854 as otherwise we could warn even for correct code like:
10855 void foo (int i, ...)
10856 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
10857 if (va_start_p)
10858 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10859 else
10860 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
10861 }
10862 return false;
6de9cd9a
DN
10863}
10864
10865
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  /* If the size wasn't folded to a constant earlier, fall back to the
     documented "unknown" values: (size_t) -1 for types 0 and 1,
     (size_t) 0 for types 2 and 3.  */
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
10900
10901/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10902 FCODE is the BUILT_IN_* to use.
5039610b 10903 Return NULL_RTX if we failed; the caller should emit a normal call,
10a0d495
JJ
10904 otherwise try to get the result in TARGET, if convenient (and in
10905 mode MODE if that's convenient). */
10906
10907static rtx
ef4bddc2 10908expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10a0d495
JJ
10909 enum built_in_function fcode)
10910{
5039610b 10911 if (!validate_arglist (exp,
10a0d495
JJ
10912 POINTER_TYPE,
10913 fcode == BUILT_IN_MEMSET_CHK
10914 ? INTEGER_TYPE : POINTER_TYPE,
10915 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5039610b 10916 return NULL_RTX;
10a0d495 10917
cc8bea0a
MS
10918 tree dest = CALL_EXPR_ARG (exp, 0);
10919 tree src = CALL_EXPR_ARG (exp, 1);
10920 tree len = CALL_EXPR_ARG (exp, 2);
10921 tree size = CALL_EXPR_ARG (exp, 3);
10a0d495 10922
cc8bea0a
MS
10923 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10924 /*str=*/NULL_TREE, size);
ee92e7ba
MS
10925
10926 if (!tree_fits_uhwi_p (size))
5039610b 10927 return NULL_RTX;
10a0d495 10928
cc269bb6 10929 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10a0d495 10930 {
ee92e7ba
MS
10931 /* Avoid transforming the checking call to an ordinary one when
10932 an overflow has been detected or when the call couldn't be
10933 validated because the size is not constant. */
10934 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10935 return NULL_RTX;
10a0d495 10936
ee92e7ba 10937 tree fn = NULL_TREE;
10a0d495
JJ
10938 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10939 mem{cpy,pcpy,move,set} is available. */
10940 switch (fcode)
10941 {
10942 case BUILT_IN_MEMCPY_CHK:
e79983f4 10943 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10a0d495
JJ
10944 break;
10945 case BUILT_IN_MEMPCPY_CHK:
e79983f4 10946 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10a0d495
JJ
10947 break;
10948 case BUILT_IN_MEMMOVE_CHK:
e79983f4 10949 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10a0d495
JJ
10950 break;
10951 case BUILT_IN_MEMSET_CHK:
e79983f4 10952 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10a0d495
JJ
10953 break;
10954 default:
10955 break;
10956 }
10957
10958 if (! fn)
5039610b 10959 return NULL_RTX;
10a0d495 10960
aa493694 10961 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
44e10129
MM
10962 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10963 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
10964 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10965 }
10966 else if (fcode == BUILT_IN_MEMSET_CHK)
5039610b 10967 return NULL_RTX;
10a0d495
JJ
10968 else
10969 {
0eb77834 10970 unsigned int dest_align = get_pointer_alignment (dest);
10a0d495
JJ
10971
10972 /* If DEST is not a pointer type, call the normal function. */
10973 if (dest_align == 0)
5039610b 10974 return NULL_RTX;
10a0d495
JJ
10975
10976 /* If SRC and DEST are the same (and not volatile), do nothing. */
10977 if (operand_equal_p (src, dest, 0))
10978 {
10979 tree expr;
10980
10981 if (fcode != BUILT_IN_MEMPCPY_CHK)
10982 {
10983 /* Evaluate and ignore LEN in case it has side-effects. */
10984 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10985 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10986 }
10987
5d49b6a7 10988 expr = fold_build_pointer_plus (dest, len);
10a0d495
JJ
10989 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10990 }
10991
10992 /* __memmove_chk special case. */
10993 if (fcode == BUILT_IN_MEMMOVE_CHK)
10994 {
0eb77834 10995 unsigned int src_align = get_pointer_alignment (src);
10a0d495
JJ
10996
10997 if (src_align == 0)
5039610b 10998 return NULL_RTX;
10a0d495
JJ
10999
11000 /* If src is categorized for a readonly section we can use
11001 normal __memcpy_chk. */
11002 if (readonly_data_expr (src))
11003 {
e79983f4 11004 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10a0d495 11005 if (!fn)
5039610b 11006 return NULL_RTX;
aa493694
JJ
11007 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11008 dest, src, len, size);
44e10129
MM
11009 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11010 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
11011 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11012 }
11013 }
5039610b 11014 return NULL_RTX;
10a0d495
JJ
11015 }
11016}
11017
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the *_chk call; FCODE selects which argument positions hold
   the source, bound and object size.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  /* Pick the relevant arguments out of EXP according to the builtin's
     signature.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
11089}
11090
11091/* Emit warning if a buffer overflow is detected at compile time
11092 in __sprintf_chk/__vsprintf_chk calls. */
11093
11094static void
11095maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11096{
451409e4 11097 tree size, len, fmt;
10a0d495 11098 const char *fmt_str;
5039610b 11099 int nargs = call_expr_nargs (exp);
10a0d495
JJ
11100
11101 /* Verify the required arguments in the original call. */
b8698a0f 11102
5039610b 11103 if (nargs < 4)
10a0d495 11104 return;
5039610b
SL
11105 size = CALL_EXPR_ARG (exp, 2);
11106 fmt = CALL_EXPR_ARG (exp, 3);
10a0d495 11107
cc269bb6 11108 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10a0d495
JJ
11109 return;
11110
11111 /* Check whether the format is a literal string constant. */
11112 fmt_str = c_getstr (fmt);
11113 if (fmt_str == NULL)
11114 return;
11115
62e5bf5d 11116 if (!init_target_chars ())
000ba23d
KG
11117 return;
11118
10a0d495 11119 /* If the format doesn't contain % args or %%, we know its size. */
000ba23d 11120 if (strchr (fmt_str, target_percent) == 0)
10a0d495
JJ
11121 len = build_int_cstu (size_type_node, strlen (fmt_str));
11122 /* If the format is "%s" and first ... argument is a string literal,
11123 we know it too. */
5039610b
SL
11124 else if (fcode == BUILT_IN_SPRINTF_CHK
11125 && strcmp (fmt_str, target_percent_s) == 0)
10a0d495
JJ
11126 {
11127 tree arg;
11128
5039610b 11129 if (nargs < 5)
10a0d495 11130 return;
5039610b 11131 arg = CALL_EXPR_ARG (exp, 4);
10a0d495
JJ
11132 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11133 return;
11134
11135 len = c_strlen (arg, 1);
cc269bb6 11136 if (!len || ! tree_fits_uhwi_p (len))
10a0d495
JJ
11137 return;
11138 }
11139 else
11140 return;
11141
ee92e7ba
MS
11142 /* Add one for the terminating nul. */
11143 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
cc8bea0a
MS
11144
11145 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11146 /*maxread=*/NULL_TREE, len, size);
10a0d495
JJ
11147}
11148
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  if (call_expr_nargs (exp) != 1)
    return;

  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  /* Only a literal &object can be proven non-heap here.  */
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  /* An indirection means the address may still point into the heap,
     so stay silent.  */
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
11174
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
11221
903c723b
TC
11222/* Builtins with folding operations that operate on "..." arguments
11223 need special handling; we need to store the arguments in a convenient
11224 data structure before attempting any folding. Fortunately there are
11225 only a few builtins that fall into this category. FNDECL is the
11226 function, EXP is the CALL_EXPR for the call. */
11227
11228static tree
11229fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11230{
11231 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11232 tree ret = NULL_TREE;
11233
11234 switch (fcode)
11235 {
11236 case BUILT_IN_FPCLASSIFY:
11237 ret = fold_builtin_fpclassify (loc, args, nargs);
11238 break;
11239
11240 default:
11241 break;
11242 }
11243 if (ret)
11244 {
11245 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11246 SET_EXPR_LOCATION (ret, loc);
11247 TREE_NO_WARNING (ret) = 1;
11248 return ret;
11249 }
11250 return NULL_TREE;
11251}
11252
000ba23d
KG
11253/* Initialize format string characters in the target charset. */
11254
fef5a0d9 11255bool
000ba23d
KG
11256init_target_chars (void)
11257{
11258 static bool init;
11259 if (!init)
11260 {
11261 target_newline = lang_hooks.to_target_charset ('\n');
11262 target_percent = lang_hooks.to_target_charset ('%');
11263 target_c = lang_hooks.to_target_charset ('c');
11264 target_s = lang_hooks.to_target_charset ('s');
11265 if (target_newline == 0 || target_percent == 0 || target_c == 0
11266 || target_s == 0)
11267 return false;
11268
11269 target_percent_c[0] = target_percent;
11270 target_percent_c[1] = target_c;
11271 target_percent_c[2] = '\0';
11272
11273 target_percent_s[0] = target_percent;
11274 target_percent_s[1] = target_s;
11275 target_percent_s[2] = '\0';
11276
11277 target_percent_s_newline[0] = target_percent;
11278 target_percent_s_newline[1] = target_s;
11279 target_percent_s_newline[2] = target_newline;
11280 target_percent_s_newline[3] = '\0';
c22cacf3 11281
000ba23d
KG
11282 init = true;
11283 }
11284 return true;
11285}
1f3f1f68 11286
4413d881
KG
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Convert from MPFR to GCC's internal float representation,
	 rounding to nearest.  */
      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Round into the target mode and verify the round trip is
	     lossless before building a constant.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
11323
c128599a
KG
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  Note FORCE_CONVERT short-circuits
     every validity check at all three levels below.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert both parts from MPFR into GCC's representation; TYPE is
	 the complex type, so the element type is TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
c128599a 11370
ea91f957
KG
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      /* NaN/Inf operands are left for the library to handle at
	 run time.  */
      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  /* Compute at exactly the target precision; M0 is reused
	     in place as the remainder output.  */
	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  /* Flags must be clear so do_mpfr_ckconv can detect
	     overflow/underflow caused by the operation itself.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
752b7d38
KG
11444
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  /* Evaluate lgamma at target precision; M is both input and
	     output.  SG receives the sign of gamma(ARG).  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect any
	     overflow/underflow raised by mpfr_lgamma itself.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 11509
a41d064d
KG
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless DO_NONFINITE, require all four components finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  /* Compute at target precision; M0 is reused in place as the
	     result operand of FUNC.  */
	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear flags so do_mpc_ckconv can detect overflow/underflow
	     raised by FUNC itself.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 11569
726a989a
RB
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins; __builtin_va_arg_pack
     pass-through calls must be left untouched.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* For a zero-argument call point at a dummy so ARGS is never
	 a null pointer.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper some folders add
		     (e.g. fold_builtin_varargs) to reach the real
		     expression.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
d7f09764 11619
e79983f4 11620/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
11621 and set ASMSPEC as its user assembler name. DECL must be a
11622 function decl that declares a builtin. */
11623
11624void
11625set_builtin_user_assembler_name (tree decl, const char *asmspec)
11626{
3d78e008 11627 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
d7f09764
DN
11628 && asmspec != 0);
11629
ee516de9 11630 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 11631 set_user_assembler_name (builtin, asmspec);
ee516de9
EB
11632
11633 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11634 && INT_TYPE_SIZE < BITS_PER_WORD)
d7f09764 11635 {
fffbab82 11636 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
ee516de9 11637 set_user_assembler_libfunc ("ffs", asmspec);
fffbab82 11638 set_optab_libfunc (ffs_optab, mode, "ffs");
d7f09764
DN
11639 }
11640}
bec922f0
SL
11641
11642/* Return true if DECL is a builtin that expands to a constant or similarly
11643 simple code. */
11644bool
11645is_simple_builtin (tree decl)
11646{
3d78e008 11647 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
bec922f0
SL
11648 switch (DECL_FUNCTION_CODE (decl))
11649 {
11650 /* Builtins that expand to constants. */
11651 case BUILT_IN_CONSTANT_P:
11652 case BUILT_IN_EXPECT:
11653 case BUILT_IN_OBJECT_SIZE:
11654 case BUILT_IN_UNREACHABLE:
11655 /* Simple register moves or loads from stack. */
45d439ac 11656 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
11657 case BUILT_IN_RETURN_ADDRESS:
11658 case BUILT_IN_EXTRACT_RETURN_ADDR:
11659 case BUILT_IN_FROB_RETURN_ADDR:
11660 case BUILT_IN_RETURN:
11661 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11662 case BUILT_IN_FRAME_ADDRESS:
11663 case BUILT_IN_VA_END:
11664 case BUILT_IN_STACK_SAVE:
11665 case BUILT_IN_STACK_RESTORE:
11666 /* Exception state returns or moves registers around. */
11667 case BUILT_IN_EH_FILTER:
11668 case BUILT_IN_EH_POINTER:
11669 case BUILT_IN_EH_COPY_VALUES:
11670 return true;
11671
11672 default:
11673 return false;
11674 }
11675
11676 return false;
11677}
11678
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  /* All machine-specific builtins are considered cheap.  */
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* Bit-twiddling, classification and varargs helpers that expand
	 to a handful of instructions.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      /* Everything not listed falls back to the stricter check.  */
      default:
	return is_simple_builtin (decl);
      }

  return false;
}
488c6247
ML
11763
11764/* Return true if T is a constant and the value cast to a target char
11765 can be represented by a host char.
11766 Store the casted char constant in *P if so. */
11767
11768bool
11769target_char_cst_p (tree t, char *p)
11770{
11771 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11772 return false;
11773
11774 *p = (char)tree_to_uhwi (t);
11775 return true;
11776}
5747e0c0
XHL
11777
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise returns false which doesn't guarantee it is not (thus the list of
   handled builtins below may be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
    {
      /* C math-library functions, in all float widths (and the _FloatN
	 variants where CASE_FLT_FN_FLOATN_NX is used).  */
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
    }
  return false;
}