]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
[Ada] Update headers
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
28f4ec01 1/* Expand builtin functions.
8d9254fc 2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5
AM
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5
AM
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
957060b5 42#include "diagnostic-core.h"
40e23961 43#include "alias.h"
40e23961 44#include "fold-const.h"
5c1a2e63 45#include "fold-const-call.h"
cc8bea0a 46#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
ef29b12c 51#include "tree-ssa-strlen.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
44a845ca 70#include "gimple-fold.h"
ee92e7ba 71#include "intl.h"
7365279f 72#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
73#include "gomp-constants.h"
74#include "omp-general.h"
464969eb 75#include "tree-dfa.h"
81f5094d 76
fa19795e
RS
77struct target_builtins default_target_builtins;
78#if SWITCHABLE_TARGET
79struct target_builtins *this_target_builtins = &default_target_builtins;
80#endif
81
9df2c88c 82/* Define the names of the builtin function types and codes. */
5e351e96 83const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85
c6a912da 86#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 87const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
88{
89#include "builtins.def"
90};
9df2c88c 91
cbf5d0e7 92/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 93 initialized to NULL_TREE. */
cbf5d0e7 94builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 95
4e7d7b3d
JJ
96/* Non-zero if __builtin_constant_p should be folded right away. */
97bool force_folding_builtin_constant_p;
98
4682ae04 99static int target_char_cast (tree, char *);
435bb2a1 100static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
101static int apply_args_size (void);
102static int apply_result_size (void);
4682ae04 103static rtx result_vector (int, rtx);
4682ae04
AJ
104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6c7cf1f0 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 114static rtx expand_builtin_sincos (tree);
4359dc2a 115static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 118static rtx expand_builtin_next_arg (void);
4682ae04
AJ
119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
523a59ff 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
44e10129 123static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 126static rtx expand_builtin_memchr (tree, rtx);
44e10129 127static rtx expand_builtin_memcpy (tree, rtx);
671a00ee 128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
2ff5ffb6 129 rtx target, tree exp,
03a9b90a
AS
130 memop_ret retmode,
131 bool might_overlap);
e50d56a5 132static rtx expand_builtin_memmove (tree, rtx);
671a00ee 133static rtx expand_builtin_mempcpy (tree, rtx);
2ff5ffb6 134static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
b5338fb3 135static rtx expand_builtin_strcat (tree);
44e10129 136static rtx expand_builtin_strcpy (tree, rtx);
e08341bb 137static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
ef4bddc2 138static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 139static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 140static rtx expand_builtin_strncat (tree, rtx);
44e10129 141static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 142static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
143static rtx expand_builtin_memset (tree, rtx, machine_mode);
144static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 145static rtx expand_builtin_bzero (tree);
ef4bddc2 146static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 147static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 148static rtx expand_builtin_alloca (tree);
ef4bddc2 149static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 150static rtx expand_builtin_frame_address (tree, tree);
db3927fb 151static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 152static rtx expand_builtin_expect (tree, rtx);
1e9168b2 153static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
154static tree fold_builtin_constant_p (tree);
155static tree fold_builtin_classify_type (tree);
ab996409 156static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 157static tree fold_builtin_inf (location_t, tree, int);
db3927fb 158static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 159static bool validate_arg (const_tree, enum tree_code code);
4682ae04 160static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 161static rtx expand_builtin_signbit (tree, rtx);
db3927fb 162static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
163static tree fold_builtin_isascii (location_t, tree);
164static tree fold_builtin_toascii (location_t, tree);
165static tree fold_builtin_isdigit (location_t, tree);
166static tree fold_builtin_fabs (location_t, tree, tree);
167static tree fold_builtin_abs (location_t, tree, tree);
168static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 169 enum tree_code);
903c723b 170static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb 171
b5338fb3
MS
172static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173static tree fold_builtin_strspn (location_t, tree, tree, tree);
174static tree fold_builtin_strcspn (location_t, tree, tree, tree);
6de9cd9a 175
10a0d495 176static rtx expand_builtin_object_size (tree);
ef4bddc2 177static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
178 enum built_in_function);
179static void maybe_emit_chk_warning (tree, enum built_in_function);
180static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 181static void maybe_emit_free_warning (tree);
5039610b 182static tree fold_builtin_object_size (tree, tree);
000ba23d 183
ad03a744 184unsigned HOST_WIDE_INT target_newline;
fef5a0d9 185unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
186static unsigned HOST_WIDE_INT target_c;
187static unsigned HOST_WIDE_INT target_s;
edd7ae68 188char target_percent_c[3];
fef5a0d9 189char target_percent_s[3];
ad03a744 190char target_percent_s_newline[4];
ea91f957 191static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 192static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 193static void expand_builtin_sync_synchronize (void);
10a0d495 194
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   i.e. is the "internal" name of one of the compiler's builtin
   function families.  */

static bool
is_builtin_name (const char *name)
{
  /* The recognized builtin namespaces.  Extend this table when a new
     prefix is introduced; the lengths are derived from the literals so
     they cannot drift out of sync.  */
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
6de9cd9a 208
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin"
   (is_builtin_name also accepts the "__sync_" and "__atomic_" prefixes).  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
222
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels contribute no alignment information beyond BITS_PER_UNIT.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      /* The index is scaled by STEP, so only STEP's low set bit
		 is guaranteed in the resulting address.  */
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  /* A variable index makes the constant part of the offset
	     unreliable, so the result is only an approximation.  */
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  /* Only the constant coefficient of the (possibly polynomial) offset
     is reported; the variable parts were folded into ALIGN above.  */
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
380
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false: the caller asks about an actual access to EXP,
     not merely about taking its address.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
392
0eb77834 393/* Return the alignment in bits of EXP, an object. */
daade206
RG
394
395unsigned int
0eb77834 396get_object_alignment (tree exp)
daade206
RG
397{
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
644ffefd 401 get_object_alignment_1 (exp, &align, &bitpos);
daade206 402
e80c2726
RG
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
146ec50f 407 align = least_bit_hwi (bitpos);
0eb77834 408 return align;
df96b059
JJ
409}
410
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &OBJ: defer to the object's alignment; ADDR_P is true because
       the access itself may never happen.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      /* Recurse on the base pointer, then fold the added offset into
	 either BITPOS (constant) or ALIGN (variable).  */
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* A variable offset can only guarantee as many low zero bits
	     as it provably has trailing zeros.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Use the alignment recorded by the SSA pointer-info machinery,
	 if any.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant "address" has all its bits known, so report the
	 strongest alignment the target recognizes.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Unknown expression form: fall back to the weakest answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
485
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    /* A nonzero misalignment caps the provable alignment at its
       lowest set bit.  */
    align = least_bit_hwi (bitpos);

  return align;
}
509
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  const char *base = (const char *) ptr;
  unsigned count = 0;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char: stop at the first
	 zero byte.  */
      while (count < maxelts && base[count] != '\0')
	count++;
    }
  else
    {
      /* Wide elements: an element is "zero" only when all ELTSIZE of
	 its bytes are zero.  */
      while (count < maxelts
	     && memcmp (base + count * eltsize, "\0\0\0\0", eltsize) != 0)
	count++;
    }

  return count;
}
542
/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic due to it being called with an argument that is
   a character array with no terminating NUL, declared at DECL.  Marks ARG
   with TREE_NO_WARNING so the diagnostic is emitted at most once.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  /* Bail out if this argument was already diagnosed.  */
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      /* Suppress any further warnings for the same argument.  */
      TREE_NO_WARNING (arg) = 1;
    }
}
563
/* For a call EXPR (which may be null) that expects a string argument
   and SRC as the argument, returns false if SRC is a character array
   with no terminating NUL.  When nonnull, BOUND is the number of
   characters in which to expect the terminating NUL.
   When EXPR is nonnull also issues a warning.  */

bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
  tree size;
  bool exact;
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  if (bound)
    {
      wide_int min, max;
      if (TREE_CODE (bound) == INTEGER_CST)
	min = max = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, &min, &max);
	  /* Without a known range for BOUND no violation can be
	     proven, so treat the array as acceptable.  */
	  if (rng != VR_RANGE)
	    return true;
	}

      /* If the smallest possible bound stops the access at or before
	 the end of the array, the missing NUL can never be read.  */
      if (wi::leu_p (min, wi::to_wide (size)))
	return true;
    }

  if (expr && !TREE_NO_WARNING (expr))
    {
      tree fndecl = get_callee_fndecl (expr);
      const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
    }

  return false;
}
608
/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.

   NOTE(review): *EXACT is written whenever SIZE is nonnull; this
   assumes callers passing SIZE also pass EXACT -- confirm.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		/* A fully variable offset leaves the size inexact.  */
		*exact = false;
	    }
	  else
	    /* No offset: MINLEN is the exact size.  */
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
655
1eb4547b
MS
656/* Compute the length of a null-terminated character string or wide
657 character string handling character sizes of 1, 2, and 4 bytes.
658 TREE_STRING_LENGTH is not the right way because it evaluates to
659 the size of the character array in bytes (as opposed to characters)
660 and because it can contain a zero byte in the middle.
28f4ec01 661
f1ba665b 662 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 663 into the instruction stream and zero if it is going to be expanded.
f1ba665b 664 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627 665 is returned, otherwise NULL, since
14b7950f 666 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
ae808627
JJ
667 evaluate the side-effects.
668
21e8fb22
RB
669 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
670 accesses. Note that this implies the result is not going to be emitted
671 into the instruction stream.
672
7d583f42 673 Additional information about the string accessed may be recorded
14b7950f 674 in DATA. For example, if ARG references an unterminated string,
7d583f42
JL
675 then the declaration will be stored in the DECL field. If the
676 length of the unterminated string can be determined, it'll be
677 stored in the LEN field. Note this length could well be different
678 than what a C strlen call would return.
6ab24ea8 679
4148b00d
BE
680 ELTSIZE is 1 for normal single byte character strings, and 2 or
681 4 for wide character strings.  ELTSIZE is by default 1.
fed3cef0 682
4148b00d 683 The value returned is of type `ssizetype'. */
28f4ec01 684
6de9cd9a 685tree
14b7950f 686c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
28f4ec01 687{
7d583f42
JL
688 /* If we were not passed a DATA pointer, then get one to a local
689 structure. That avoids having to check DATA for NULL before
690 each time we want to use it. */
3f46ef1f 691 c_strlen_data local_strlen_data = { };
7d583f42
JL
692 if (!data)
693 data = &local_strlen_data;
694
1ebf0641 695 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
14b7950f
MS
696
697 tree src = STRIP_NOPS (arg);
ae808627
JJ
698 if (TREE_CODE (src) == COND_EXPR
699 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
700 {
701 tree len1, len2;
702
7d583f42
JL
703 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
704 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
33521f7d 705 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
706 return len1;
707 }
708
709 if (TREE_CODE (src) == COMPOUND_EXPR
710 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
7d583f42 711 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
ae808627 712
1eb4547b 713 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 714
1eb4547b
MS
715 /* Offset from the beginning of the string in bytes. */
716 tree byteoff;
4148b00d 717 tree memsize;
6ab24ea8
MS
718 tree decl;
719 src = string_constant (src, &byteoff, &memsize, &decl);
28f4ec01 720 if (src == 0)
5039610b 721 return NULL_TREE;
fed3cef0 722
1eb4547b 723 /* Determine the size of the string element. */
4148b00d
BE
724 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
725 return NULL_TREE;
1eb4547b
MS
726
727 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
35b4d3a6 728 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
bfb9bd47
MS
729 in case the latter is less than the size of the array, such as when
730 SRC refers to a short string literal used to initialize a large array.
731 In that case, the elements of the array after the terminating NUL are
732 all NUL. */
733 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
d01b568a 734 strelts = strelts / eltsize;
bfb9bd47 735
4148b00d
BE
736 if (!tree_fits_uhwi_p (memsize))
737 return NULL_TREE;
738
d01b568a 739 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1eb4547b
MS
740
741 /* PTR can point to the byte representation of any string type, including
742 char* and wchar_t*. */
743 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 744
1eb4547b 745 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01 746 {
4148b00d
BE
747 /* The code below works only for single byte character types. */
748 if (eltsize != 1)
749 return NULL_TREE;
750
bfb9bd47
MS
751 /* If the string has an internal NUL character followed by any
752 non-NUL characters (e.g., "foo\0bar"), we can't compute
753 the offset to the following NUL if we don't know where to
28f4ec01 754 start searching for it. */
bfb9bd47 755 unsigned len = string_length (ptr, eltsize, strelts);
fed3cef0 756
7d583f42
JL
757 /* Return when an embedded null character is found or none at all.
758 In the latter case, set the DECL/LEN field in the DATA structure
759 so that callers may examine them. */
6ab24ea8 760 if (len + 1 < strelts)
4148b00d 761 return NULL_TREE;
6ab24ea8
MS
762 else if (len >= maxelts)
763 {
7d583f42 764 data->decl = decl;
6c4aa5f6 765 data->off = byteoff;
b71bbbe2 766 data->minlen = ssize_int (len);
6ab24ea8
MS
767 return NULL_TREE;
768 }
c42d0aa0 769
d01b568a
BE
770 /* For empty strings the result should be zero. */
771 if (len == 0)
772 return ssize_int (0);
773
28f4ec01 774 /* We don't know the starting offset, but we do know that the string
bfb9bd47
MS
775 has no internal zero bytes. If the offset falls within the bounds
776 of the string subtract the offset from the length of the string,
777 and return that. Otherwise the length is zero. Take care to
778 use SAVE_EXPR in case the OFFSET has side-effects. */
e8bf3d5e
BE
779 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
780 : byteoff;
781 offsave = fold_convert_loc (loc, sizetype, offsave);
bfb9bd47 782 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
e8bf3d5e
BE
783 size_int (len));
784 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
785 offsave);
786 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
bfb9bd47
MS
787 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
788 build_zero_cst (ssizetype));
28f4ec01
BS
789 }
790
1eb4547b
MS
791 /* Offset from the beginning of the string in elements. */
792 HOST_WIDE_INT eltoff;
793
28f4ec01 794 /* We have a known offset into the string. Start searching there for
5197bd50 795 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
796 if (byteoff == 0)
797 eltoff = 0;
1ebf0641 798 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1eb4547b 799 eltoff = -1;
28f4ec01 800 else
1ebf0641 801 eltoff = tree_to_uhwi (byteoff) / eltsize;
fed3cef0 802
b2ed71b6
BE
803 /* If the offset is known to be out of bounds, warn, and call strlen at
804 runtime. */
d01b568a 805 if (eltoff < 0 || eltoff >= maxelts)
28f4ec01 806 {
1db01ff9 807 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81 808 if (only_value != 2
14b7950f 809 && !TREE_NO_WARNING (arg)
1db01ff9
JJ
810 && warning_at (loc, OPT_Warray_bounds,
811 "offset %qwi outside bounds of constant string",
812 eltoff))
14b7950f
MS
813 {
814 if (decl)
815 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
816 TREE_NO_WARNING (arg) = 1;
817 }
5039610b 818 return NULL_TREE;
28f4ec01 819 }
fed3cef0 820
4148b00d
BE
821 /* If eltoff is larger than strelts but less than maxelts the
822 string length is zero, since the excess memory will be zero. */
823 if (eltoff > strelts)
824 return ssize_int (0);
825
28f4ec01
BS
826 /* Use strlen to search for the first zero byte. Since any strings
827 constructed with build_string will have nulls appended, we win even
828 if we get handed something like (char[4])"abcd".
829
1eb4547b 830 Since ELTOFF is our starting index into the string, no further
28f4ec01 831 calculation is needed. */
1eb4547b 832 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
4148b00d 833 strelts - eltoff);
1eb4547b 834
d01b568a 835 /* Don't know what to return if there was no zero termination.
7d583f42
JL
836 Ideally this would turn into a gcc_checking_assert over time.
837 Set DECL/LEN so callers can examine them. */
d01b568a 838 if (len >= maxelts - eltoff)
6ab24ea8 839 {
7d583f42 840 data->decl = decl;
6c4aa5f6 841 data->off = byteoff;
b71bbbe2 842 data->minlen = ssize_int (len);
6ab24ea8
MS
843 return NULL_TREE;
844 }
1ebf0641 845
1eb4547b 846 return ssize_int (len);
28f4ec01
BS
847}
848
807e902e 849/* Return a constant integer corresponding to target reading
3140b2ed
JJ
850 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
851 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
852 are assumed to be zero, otherwise it reads as many characters
853 as needed. */
854
855rtx
856c_readstr (const char *str, scalar_int_mode mode,
857 bool null_terminated_p/*=true*/)
57814e5e 858{
57814e5e
JJ
859 HOST_WIDE_INT ch;
860 unsigned int i, j;
807e902e 861 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
57814e5e 862
298e6adc 863 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
807e902e
KZ
864 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
865 / HOST_BITS_PER_WIDE_INT;
866
867 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
868 for (i = 0; i < len; i++)
869 tmp[i] = 0;
5906d013 870
57814e5e
JJ
871 ch = 1;
872 for (i = 0; i < GET_MODE_SIZE (mode); i++)
873 {
874 j = i;
875 if (WORDS_BIG_ENDIAN)
876 j = GET_MODE_SIZE (mode) - i - 1;
877 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
e046112d 878 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
57814e5e
JJ
879 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
880 j *= BITS_PER_UNIT;
5906d013 881
3140b2ed 882 if (ch || !null_terminated_p)
57814e5e 883 ch = (unsigned char) str[i];
807e902e 884 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
57814e5e 885 }
807e902e
KZ
886
887 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
888 return immed_wide_int_const (c, mode);
57814e5e
JJ
889}
890
ab937357 891/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 892 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
893 P. */
894
895static int
4682ae04 896target_char_cast (tree cst, char *p)
ab937357
JJ
897{
898 unsigned HOST_WIDE_INT val, hostval;
899
de77ab75 900 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
901 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
902 return 1;
903
807e902e 904 /* Do not care if it fits or not right here. */
de77ab75 905 val = TREE_INT_CST_LOW (cst);
807e902e 906
ab937357 907 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 908 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
909
910 hostval = val;
911 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 912 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
913
914 if (val != hostval)
915 return 1;
916
917 *p = hostval;
918 return 0;
919}
920
6de9cd9a
DN
921/* Similar to save_expr, but assumes that arbitrary code is not executed
922 in between the multiple evaluations. In particular, we assume that a
923 non-addressable local variable will not be modified. */
924
925static tree
926builtin_save_expr (tree exp)
927{
5cbf5c20
RG
928 if (TREE_CODE (exp) == SSA_NAME
929 || (TREE_ADDRESSABLE (exp) == 0
930 && (TREE_CODE (exp) == PARM_DECL
8813a647 931 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
932 return exp;
933
934 return save_expr (exp);
935}
936
28f4ec01
BS
937/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
938 times to get the address of either a higher stack frame, or a return
939 address located within it (depending on FNDECL_CODE). */
fed3cef0 940
54e62799 941static rtx
c6d01079 942expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
28f4ec01
BS
943{
944 int i;
c6d01079 945 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
579f2946 946 if (tem == NULL_RTX)
c8f27794 947 {
579f2946
TS
948 /* For a zero count with __builtin_return_address, we don't care what
949 frame address we return, because target-specific definitions will
950 override us. Therefore frame pointer elimination is OK, and using
951 the soft frame pointer is OK.
952
953 For a nonzero count, or a zero count with __builtin_frame_address,
954 we require a stable offset from the current frame pointer to the
955 previous one, so we must use the hard frame pointer, and
956 we must disable frame pointer elimination. */
957 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
958 tem = frame_pointer_rtx;
959 else
960 {
961 tem = hard_frame_pointer_rtx;
c8f27794 962
579f2946
TS
963 /* Tell reload not to eliminate the frame pointer. */
964 crtl->accesses_prior_frames = 1;
965 }
c8f27794 966 }
c6d01079 967
28f4ec01
BS
968 if (count > 0)
969 SETUP_FRAME_ADDRESSES ();
28f4ec01 970
224869d9 971 /* On the SPARC, the return address is not in the frame, it is in a
28f4ec01
BS
972 register. There is no way to access it off of the current frame
973 pointer, but it can be accessed off the previous frame pointer by
974 reading the value from the register window save area. */
2e612c47 975 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
28f4ec01 976 count--;
28f4ec01
BS
977
978 /* Scan back COUNT frames to the specified frame. */
979 for (i = 0; i < count; i++)
980 {
981 /* Assume the dynamic chain pointer is in the word that the
982 frame address points to, unless otherwise specified. */
28f4ec01 983 tem = DYNAMIC_CHAIN_ADDRESS (tem);
28f4ec01 984 tem = memory_address (Pmode, tem);
bf877a76 985 tem = gen_frame_mem (Pmode, tem);
432fd734 986 tem = copy_to_reg (tem);
28f4ec01
BS
987 }
988
224869d9
EB
989 /* For __builtin_frame_address, return what we've got. But, on
990 the SPARC for example, we may have to add a bias. */
28f4ec01 991 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
224869d9 992 return FRAME_ADDR_RTX (tem);
28f4ec01 993
224869d9 994 /* For __builtin_return_address, get the return address from that frame. */
28f4ec01
BS
995#ifdef RETURN_ADDR_RTX
996 tem = RETURN_ADDR_RTX (count, tem);
997#else
998 tem = memory_address (Pmode,
0a81f074 999 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
bf877a76 1000 tem = gen_frame_mem (Pmode, tem);
28f4ec01
BS
1001#endif
1002 return tem;
1003}
1004
3bdf5ad1 1005/* Alias set used for setjmp buffer. */
4862826d 1006static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 1007
250d07b6 1008/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
1009 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1010 exception handling code. */
28f4ec01 1011
250d07b6 1012void
4682ae04 1013expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 1014{
ef4bddc2 1015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1016 rtx stack_save;
3bdf5ad1 1017 rtx mem;
28f4ec01 1018
3bdf5ad1
RK
1019 if (setjmp_alias_set == -1)
1020 setjmp_alias_set = new_alias_set ();
1021
5ae6cd0d 1022 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 1023
7d505b82 1024 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 1025
250d07b6
RH
1026 /* We store the frame pointer and the address of receiver_label in
1027 the buffer and use the rest of it for the stack save area, which
1028 is machine-dependent. */
28f4ec01 1029
3bdf5ad1 1030 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 1031 set_mem_alias_set (mem, setjmp_alias_set);
25403c41 1032 emit_move_insn (mem, hard_frame_pointer_rtx);
3bdf5ad1 1033
0a81f074
RS
1034 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode))),
ba4828e0 1036 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
1037
1038 emit_move_insn (validize_mem (mem),
250d07b6 1039 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
1040
1041 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 1042 plus_constant (Pmode, buf_addr,
28f4ec01 1043 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 1044 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 1045 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
1046
1047 /* If there is further processing to do, do it. */
95a3fb9d
RS
1048 if (targetm.have_builtin_setjmp_setup ())
1049 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 1050
ecaebb9e 1051 /* We have a nonlocal label. */
e3b5732b 1052 cfun->has_nonlocal_label = 1;
250d07b6 1053}
28f4ec01 1054
4f6c2131 1055/* Construct the trailing part of a __builtin_setjmp call. This is
e90d1568
HPN
1056 also called directly by the SJLJ exception handling code.
1057 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
250d07b6
RH
1058
1059void
95a3fb9d 1060expand_builtin_setjmp_receiver (rtx receiver_label)
250d07b6 1061{
531ca746
RH
1062 rtx chain;
1063
e90d1568 1064 /* Mark the FP as used when we get here, so we have to make sure it's
28f4ec01 1065 marked as used by this function. */
c41c1387 1066 emit_use (hard_frame_pointer_rtx);
28f4ec01
BS
1067
1068 /* Mark the static chain as clobbered here so life information
1069 doesn't get messed up for it. */
4b522b8f 1070 chain = rtx_for_static_chain (current_function_decl, true);
531ca746
RH
1071 if (chain && REG_P (chain))
1072 emit_clobber (chain);
28f4ec01 1073
38b0b093 1074 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
28f4ec01 1075 {
e90d1568
HPN
1076 /* If the argument pointer can be eliminated in favor of the
1077 frame pointer, we don't need to restore it. We assume here
1078 that if such an elimination is present, it can always be used.
1079 This is the case on all known machines; if we don't make this
1080 assumption, we do unnecessary saving on many machines. */
28f4ec01 1081 size_t i;
8b60264b 1082 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
28f4ec01 1083
b6a1cbae 1084 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
28f4ec01
BS
1085 if (elim_regs[i].from == ARG_POINTER_REGNUM
1086 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1087 break;
1088
b6a1cbae 1089 if (i == ARRAY_SIZE (elim_regs))
28f4ec01
BS
1090 {
1091 /* Now restore our arg pointer from the address at which it
278ed218 1092 was saved in our stack frame. */
2e3f842f 1093 emit_move_insn (crtl->args.internal_arg_pointer,
bd60bab2 1094 copy_to_reg (get_arg_pointer_save_area ()));
28f4ec01
BS
1095 }
1096 }
28f4ec01 1097
95a3fb9d
RS
1098 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1099 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1100 else if (targetm.have_nonlocal_goto_receiver ())
1101 emit_insn (targetm.gen_nonlocal_goto_receiver ());
28f4ec01 1102 else
95a3fb9d 1103 { /* Nothing */ }
bcd7edfe 1104
6fb5fa3c
DB
1105 /* We must not allow the code we just generated to be reordered by
1106 scheduling. Specifically, the update of the frame pointer must
f1257268 1107 happen immediately, not later. */
6fb5fa3c 1108 emit_insn (gen_blockage ());
250d07b6 1109}
28f4ec01 1110
28f4ec01
BS
1111/* __builtin_longjmp is passed a pointer to an array of five words (not
1112 all will be used on all machines). It operates similarly to the C
1113 library function of the same name, but is more efficient. Much of
4f6c2131 1114 the code below is copied from the handling of non-local gotos. */
28f4ec01 1115
54e62799 1116static void
4682ae04 1117expand_builtin_longjmp (rtx buf_addr, rtx value)
28f4ec01 1118{
58f4cf2a
DM
1119 rtx fp, lab, stack;
1120 rtx_insn *insn, *last;
ef4bddc2 1121 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1122
b8698a0f 1123 /* DRAP is needed for stack realign if longjmp is expanded to current
2e3f842f
L
1124 function */
1125 if (SUPPORTS_STACK_ALIGNMENT)
1126 crtl->need_drap = true;
1127
3bdf5ad1
RK
1128 if (setjmp_alias_set == -1)
1129 setjmp_alias_set = new_alias_set ();
1130
5ae6cd0d 1131 buf_addr = convert_memory_address (Pmode, buf_addr);
4b6c1672 1132
28f4ec01
BS
1133 buf_addr = force_reg (Pmode, buf_addr);
1134
531ca746
RH
1135 /* We require that the user must pass a second argument of 1, because
1136 that is what builtin_setjmp will return. */
298e6adc 1137 gcc_assert (value == const1_rtx);
28f4ec01 1138
d337d653 1139 last = get_last_insn ();
95a3fb9d
RS
1140 if (targetm.have_builtin_longjmp ())
1141 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
28f4ec01 1142 else
28f4ec01
BS
1143 {
1144 fp = gen_rtx_MEM (Pmode, buf_addr);
0a81f074 1145 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
28f4ec01
BS
1146 GET_MODE_SIZE (Pmode)));
1147
0a81f074 1148 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
28f4ec01 1149 2 * GET_MODE_SIZE (Pmode)));
ba4828e0
RK
1150 set_mem_alias_set (fp, setjmp_alias_set);
1151 set_mem_alias_set (lab, setjmp_alias_set);
1152 set_mem_alias_set (stack, setjmp_alias_set);
28f4ec01
BS
1153
1154 /* Pick up FP, label, and SP from the block and jump. This code is
1155 from expand_goto in stmt.c; see there for detailed comments. */
95a3fb9d 1156 if (targetm.have_nonlocal_goto ())
28f4ec01
BS
1157 /* We have to pass a value to the nonlocal_goto pattern that will
1158 get copied into the static_chain pointer, but it does not matter
1159 what that value is, because builtin_setjmp does not use it. */
95a3fb9d 1160 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
28f4ec01 1161 else
28f4ec01 1162 {
c41c1387
RS
1163 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1164 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
41439bf6 1165
511ed59d
WD
1166 lab = copy_to_reg (lab);
1167
71b14428
WD
1168 /* Restore the frame pointer and stack pointer. We must use a
1169 temporary since the setjmp buffer may be a local. */
1170 fp = copy_to_reg (fp);
9eac0f2a 1171 emit_stack_restore (SAVE_NONLOCAL, stack);
511ed59d
WD
1172
1173 /* Ensure the frame pointer move is not optimized. */
1174 emit_insn (gen_blockage ());
1175 emit_clobber (hard_frame_pointer_rtx);
1176 emit_clobber (frame_pointer_rtx);
71b14428 1177 emit_move_insn (hard_frame_pointer_rtx, fp);
28f4ec01 1178
c41c1387
RS
1179 emit_use (hard_frame_pointer_rtx);
1180 emit_use (stack_pointer_rtx);
28f4ec01
BS
1181 emit_indirect_jump (lab);
1182 }
1183 }
4b01bd16
RH
1184
1185 /* Search backwards and mark the jump insn as a non-local goto.
1186 Note that this precludes the use of __builtin_longjmp to a
1187 __builtin_setjmp target in the same function. However, we've
1188 already cautioned the user that these functions are for
1189 internal exception handling use only. */
8206fc89
AM
1190 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1191 {
298e6adc 1192 gcc_assert (insn != last);
5906d013 1193
4b4bf941 1194 if (JUMP_P (insn))
8206fc89 1195 {
65c5f2a6 1196 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
8206fc89
AM
1197 break;
1198 }
4b4bf941 1199 else if (CALL_P (insn))
ca7fd9cd 1200 break;
8206fc89 1201 }
28f4ec01
BS
1202}
1203
862d0b35
DN
1204static inline bool
1205more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1206{
1207 return (iter->i < iter->n);
1208}
1209
1210/* This function validates the types of a function call argument list
1211 against a specified list of tree_codes. If the last specifier is a 0,
474da67e 1212 that represents an ellipsis, otherwise the last specifier must be a
862d0b35
DN
1213 VOID_TYPE. */
1214
1215static bool
1216validate_arglist (const_tree callexpr, ...)
1217{
1218 enum tree_code code;
1219 bool res = 0;
1220 va_list ap;
1221 const_call_expr_arg_iterator iter;
1222 const_tree arg;
1223
1224 va_start (ap, callexpr);
1225 init_const_call_expr_arg_iterator (callexpr, &iter);
1226
474da67e 1227 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
0dba7960
JJ
1228 tree fn = CALL_EXPR_FN (callexpr);
1229 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
474da67e
MS
1230
1231 for (unsigned argno = 1; ; ++argno)
862d0b35
DN
1232 {
1233 code = (enum tree_code) va_arg (ap, int);
474da67e 1234
862d0b35
DN
1235 switch (code)
1236 {
1237 case 0:
1238 /* This signifies an ellipses, any further arguments are all ok. */
1239 res = true;
1240 goto end;
1241 case VOID_TYPE:
1242 /* This signifies an endlink, if no arguments remain, return
1243 true, otherwise return false. */
1244 res = !more_const_call_expr_args_p (&iter);
1245 goto end;
474da67e
MS
1246 case POINTER_TYPE:
1247 /* The actual argument must be nonnull when either the whole
1248 called function has been declared nonnull, or when the formal
1249 argument corresponding to the actual argument has been. */
0dba7960
JJ
1250 if (argmap
1251 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1252 {
1253 arg = next_const_call_expr_arg (&iter);
1254 if (!validate_arg (arg, code) || integer_zerop (arg))
1255 goto end;
1256 break;
1257 }
474da67e 1258 /* FALLTHRU */
862d0b35
DN
1259 default:
1260 /* If no parameters remain or the parameter's code does not
1261 match the specified code, return false. Otherwise continue
1262 checking any remaining arguments. */
1263 arg = next_const_call_expr_arg (&iter);
0dba7960 1264 if (!validate_arg (arg, code))
862d0b35
DN
1265 goto end;
1266 break;
1267 }
1268 }
862d0b35
DN
1269
1270 /* We need gotos here since we can only have one VA_CLOSE in a
1271 function. */
1272 end: ;
1273 va_end (ap);
1274
474da67e
MS
1275 BITMAP_FREE (argmap);
1276
862d0b35
DN
1277 return res;
1278}
1279
6de9cd9a
DN
1280/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1281 and the address of the save area. */
1282
1283static rtx
5039610b 1284expand_builtin_nonlocal_goto (tree exp)
6de9cd9a
DN
1285{
1286 tree t_label, t_save_area;
58f4cf2a
DM
1287 rtx r_label, r_save_area, r_fp, r_sp;
1288 rtx_insn *insn;
6de9cd9a 1289
5039610b 1290 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
1291 return NULL_RTX;
1292
5039610b
SL
1293 t_label = CALL_EXPR_ARG (exp, 0);
1294 t_save_area = CALL_EXPR_ARG (exp, 1);
6de9cd9a 1295
84217346 1296 r_label = expand_normal (t_label);
5e89a381 1297 r_label = convert_memory_address (Pmode, r_label);
84217346 1298 r_save_area = expand_normal (t_save_area);
5e89a381 1299 r_save_area = convert_memory_address (Pmode, r_save_area);
bc6d3f91
EB
1300 /* Copy the address of the save location to a register just in case it was
1301 based on the frame pointer. */
cba2d79f 1302 r_save_area = copy_to_reg (r_save_area);
6de9cd9a
DN
1303 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1304 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
0a81f074
RS
1305 plus_constant (Pmode, r_save_area,
1306 GET_MODE_SIZE (Pmode)));
6de9cd9a 1307
e3b5732b 1308 crtl->has_nonlocal_goto = 1;
6de9cd9a 1309
6de9cd9a 1310 /* ??? We no longer need to pass the static chain value, afaik. */
95a3fb9d
RS
1311 if (targetm.have_nonlocal_goto ())
1312 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
6de9cd9a 1313 else
6de9cd9a 1314 {
c41c1387
RS
1315 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1316 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
caf93cb0 1317
511ed59d
WD
1318 r_label = copy_to_reg (r_label);
1319
71b14428
WD
1320 /* Restore the frame pointer and stack pointer. We must use a
1321 temporary since the setjmp buffer may be a local. */
1322 r_fp = copy_to_reg (r_fp);
9eac0f2a 1323 emit_stack_restore (SAVE_NONLOCAL, r_sp);
511ed59d
WD
1324
1325 /* Ensure the frame pointer move is not optimized. */
1326 emit_insn (gen_blockage ());
1327 emit_clobber (hard_frame_pointer_rtx);
1328 emit_clobber (frame_pointer_rtx);
71b14428 1329 emit_move_insn (hard_frame_pointer_rtx, r_fp);
caf93cb0 1330
6de9cd9a
DN
1331 /* USE of hard_frame_pointer_rtx added for consistency;
1332 not clear if really needed. */
c41c1387
RS
1333 emit_use (hard_frame_pointer_rtx);
1334 emit_use (stack_pointer_rtx);
eae645b6
RS
1335
1336 /* If the architecture is using a GP register, we must
1337 conservatively assume that the target function makes use of it.
1338 The prologue of functions with nonlocal gotos must therefore
1339 initialize the GP register to the appropriate value, and we
1340 must then make sure that this value is live at the point
1341 of the jump. (Note that this doesn't necessarily apply
1342 to targets with a nonlocal_goto pattern; they are free
1343 to implement it in their own way. Note also that this is
1344 a no-op if the GP register is a global invariant.) */
959c1e20
AH
1345 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1346 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
c41c1387 1347 emit_use (pic_offset_table_rtx);
eae645b6 1348
6de9cd9a
DN
1349 emit_indirect_jump (r_label);
1350 }
caf93cb0 1351
6de9cd9a
DN
1352 /* Search backwards to the jump insn and mark it as a
1353 non-local goto. */
1354 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1355 {
4b4bf941 1356 if (JUMP_P (insn))
6de9cd9a 1357 {
65c5f2a6 1358 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
6de9cd9a
DN
1359 break;
1360 }
4b4bf941 1361 else if (CALL_P (insn))
6de9cd9a
DN
1362 break;
1363 }
1364
1365 return const0_rtx;
1366}
1367
2b92e7f5
RK
1368/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1369 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
1370 It updates the stack pointer in that block to the current value. This is
1371 also called directly by the SJLJ exception handling code. */
2b92e7f5 1372
d33606c3 1373void
2b92e7f5
RK
1374expand_builtin_update_setjmp_buf (rtx buf_addr)
1375{
ef4bddc2 1376 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 1377 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 1378 rtx stack_save
2b92e7f5
RK
1379 = gen_rtx_MEM (sa_mode,
1380 memory_address
1381 (sa_mode,
0a81f074
RS
1382 plus_constant (Pmode, buf_addr,
1383 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 1384
9eac0f2a 1385 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
1386}
1387
a9ccbb60
JJ
1388/* Expand a call to __builtin_prefetch. For a target that does not support
1389 data prefetch, evaluate the memory address argument in case it has side
1390 effects. */
1391
1392static void
5039610b 1393expand_builtin_prefetch (tree exp)
a9ccbb60
JJ
1394{
1395 tree arg0, arg1, arg2;
5039610b 1396 int nargs;
a9ccbb60
JJ
1397 rtx op0, op1, op2;
1398
5039610b 1399 if (!validate_arglist (exp, POINTER_TYPE, 0))
e83d297b
JJ
1400 return;
1401
5039610b
SL
1402 arg0 = CALL_EXPR_ARG (exp, 0);
1403
e83d297b
JJ
1404 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1405 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1406 locality). */
5039610b
SL
1407 nargs = call_expr_nargs (exp);
1408 if (nargs > 1)
1409 arg1 = CALL_EXPR_ARG (exp, 1);
e83d297b 1410 else
5039610b
SL
1411 arg1 = integer_zero_node;
1412 if (nargs > 2)
1413 arg2 = CALL_EXPR_ARG (exp, 2);
1414 else
9a9d280e 1415 arg2 = integer_three_node;
a9ccbb60
JJ
1416
1417 /* Argument 0 is an address. */
1418 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1419
1420 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1421 if (TREE_CODE (arg1) != INTEGER_CST)
1422 {
40b97a2e 1423 error ("second argument to %<__builtin_prefetch%> must be a constant");
ca7fd9cd 1424 arg1 = integer_zero_node;
a9ccbb60 1425 }
84217346 1426 op1 = expand_normal (arg1);
a9ccbb60
JJ
1427 /* Argument 1 must be either zero or one. */
1428 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1429 {
d4ee4d25 1430 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
40b97a2e 1431 " using zero");
a9ccbb60
JJ
1432 op1 = const0_rtx;
1433 }
1434
1435 /* Argument 2 (locality) must be a compile-time constant int. */
1436 if (TREE_CODE (arg2) != INTEGER_CST)
1437 {
40b97a2e 1438 error ("third argument to %<__builtin_prefetch%> must be a constant");
a9ccbb60
JJ
1439 arg2 = integer_zero_node;
1440 }
84217346 1441 op2 = expand_normal (arg2);
a9ccbb60
JJ
1442 /* Argument 2 must be 0, 1, 2, or 3. */
1443 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1444 {
d4ee4d25 1445 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
a9ccbb60
JJ
1446 op2 = const0_rtx;
1447 }
1448
134b044d 1449 if (targetm.have_prefetch ())
a9ccbb60 1450 {
99b1c316 1451 class expand_operand ops[3];
a5c7d693
RS
1452
1453 create_address_operand (&ops[0], op0);
1454 create_integer_operand (&ops[1], INTVAL (op1));
1455 create_integer_operand (&ops[2], INTVAL (op2));
134b044d 1456 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
a5c7d693 1457 return;
a9ccbb60 1458 }
ad76cef8 1459
5ab2f7b7
KH
1460 /* Don't do anything with direct references to volatile memory, but
1461 generate code to handle other side effects. */
3c0cb5de 1462 if (!MEM_P (op0) && side_effects_p (op0))
5ab2f7b7 1463 emit_insn (op0);
a9ccbb60
JJ
1464}
1465
3bdf5ad1 1466/* Get a MEM rtx for expression EXP which is the address of an operand
76715c32 1467 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
435bb2a1
JJ
1468 the maximum length of the block of memory that might be accessed or
1469 NULL if unknown. */
3bdf5ad1 1470
28f4ec01 1471static rtx
435bb2a1 1472get_memory_rtx (tree exp, tree len)
28f4ec01 1473{
805903b5
JJ
1474 tree orig_exp = exp;
1475 rtx addr, mem;
805903b5
JJ
1476
1477 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1478 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1479 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1480 exp = TREE_OPERAND (exp, 0);
1481
1482 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1483 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
8ac61af7 1484
3bdf5ad1 1485 /* Get an expression we can use to find the attributes to assign to MEM.
625ed172 1486 First remove any nops. */
1043771b 1487 while (CONVERT_EXPR_P (exp)
3bdf5ad1
RK
1488 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1489 exp = TREE_OPERAND (exp, 0);
1490
625ed172
MM
1491 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1492 (as builtin stringops may alias with anything). */
1493 exp = fold_build2 (MEM_REF,
1494 build_array_type (char_type_node,
1495 build_range_type (sizetype,
1496 size_one_node, len)),
1497 exp, build_int_cst (ptr_type_node, 0));
1498
1499 /* If the MEM_REF has no acceptable address, try to get the base object
1500 from the original address we got, and build an all-aliasing
1501 unknown-sized access to that one. */
1502 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1503 set_mem_attributes (mem, exp, 0);
1504 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1505 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1506 0))))
343fb412 1507 {
625ed172
MM
1508 exp = build_fold_addr_expr (exp);
1509 exp = fold_build2 (MEM_REF,
1510 build_array_type (char_type_node,
1511 build_range_type (sizetype,
1512 size_zero_node,
1513 NULL)),
1514 exp, build_int_cst (ptr_type_node, 0));
931e6c29 1515 set_mem_attributes (mem, exp, 0);
343fb412 1516 }
625ed172 1517 set_mem_alias_set (mem, 0);
28f4ec01
BS
1518 return mem;
1519}
1520\f
1521/* Built-in functions to perform an untyped call and return. */
1522
fa19795e
RS
1523#define apply_args_mode \
1524 (this_target_builtins->x_apply_args_mode)
1525#define apply_result_mode \
1526 (this_target_builtins->x_apply_result_mode)
28f4ec01 1527
28f4ec01
BS
1528/* Return the size required for the block returned by __builtin_apply_args,
1529 and initialize apply_args_mode. */
1530
1531static int
4682ae04 1532apply_args_size (void)
28f4ec01
BS
1533{
1534 static int size = -1;
cbf5468f
AH
1535 int align;
1536 unsigned int regno;
28f4ec01
BS
1537
1538 /* The values computed by this function never change. */
1539 if (size < 0)
1540 {
1541 /* The first value is the incoming arg-pointer. */
1542 size = GET_MODE_SIZE (Pmode);
1543
1544 /* The second value is the structure value address unless this is
1545 passed as an "invisible" first argument. */
92f6864c 1546 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1547 size += GET_MODE_SIZE (Pmode);
1548
1549 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1550 if (FUNCTION_ARG_REGNO_P (regno))
1551 {
b660eccf 1552 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
33521f7d 1553
298e6adc 1554 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1555
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
28f4ec01
BS
1559 size += GET_MODE_SIZE (mode);
1560 apply_args_mode[regno] = mode;
1561 }
1562 else
1563 {
b660eccf 1564 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1565 }
1566 }
1567 return size;
1568}
1569
1570/* Return the size required for the block returned by __builtin_apply,
1571 and initialize apply_result_mode. */
1572
1573static int
4682ae04 1574apply_result_size (void)
28f4ec01
BS
1575{
1576 static int size = -1;
1577 int align, regno;
28f4ec01
BS
1578
1579 /* The values computed by this function never change. */
1580 if (size < 0)
1581 {
1582 size = 0;
1583
1584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
82f81f18 1585 if (targetm.calls.function_value_regno_p (regno))
28f4ec01 1586 {
b660eccf 1587 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
33521f7d 1588
298e6adc 1589 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1590
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 size += GET_MODE_SIZE (mode);
1595 apply_result_mode[regno] = mode;
1596 }
1597 else
b660eccf 1598 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1599
1600 /* Allow targets that use untyped_call and untyped_return to override
1601 the size so that machine-specific information can be stored here. */
1602#ifdef APPLY_RESULT_SIZE
1603 size = APPLY_RESULT_SIZE;
1604#endif
1605 }
1606 return size;
1607}
1608
28f4ec01
BS
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per value
   register recorded in apply_result_mode.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  /* Worst case: one SET per hard register.  */
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep each slot aligned for its mode, mirroring the layout
	   chosen by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the value comes back in the register as seen
	   by the caller, hence INCOMING_REGNO.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1637
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Emits the saving
   insns into the current sequence and returns the address (in a pseudo)
   of the stack block holding the saved state.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  /* Where the structure value address arrived, if it is passed as an
     implicit (invisible) first argument; 0 otherwise.  */
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The layout must match apply_args_size, which filled in
     apply_args_mode.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1695
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Build the register-saving code in a detached sequence so it can
       be spliced in at the function entry below.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1739
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of stack
   arguments to copy.  Returns the address (in ptr_mode) of a stack
   block holding the callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (The call also ensures
     apply_args_mode has been initialized.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1900
/* Perform an untyped return.  RESULT is the address of the block
   (as produced by __builtin_apply) holding the return registers to
   reload before returning.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* Prefer the target's dedicated untyped_return pattern when
     available.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE for each restored register so the values
	   are not considered dead before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1949
ad82abb8 1950/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1951
ad82abb8 1952static enum type_class
4682ae04 1953type_to_class (tree type)
ad82abb8
ZW
1954{
1955 switch (TREE_CODE (type))
1956 {
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
ad82abb8
ZW
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1975 }
1976}
8d51ecf8 1977
5039610b 1978/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1979
28f4ec01 1980static rtx
5039610b 1981expand_builtin_classify_type (tree exp)
28f4ec01 1982{
5039610b
SL
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1985 return GEN_INT (no_type_class);
1986}
1987
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.
   NOTE: these macros expand to case labels plus assignments inside the
   switch in mathfn_built_in_2; the fcode* variables they assign are
   locals declared there.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 2015
5c1a2e63
RS
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  /* Filled in by the CASE_MATHFN* macros below; the _Float<N> variants
     stay END_BUILTINS unless CASE_MATHFN_FLOATN applies.  */
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN_FLOATN (ROUNDEVEN)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the variant whose argument type matches TYPE's main
     variant.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
2153
2154/* Return mathematic function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2158
2159static tree
b03ff92e 2160mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2161{
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
5039610b 2164 return NULL_TREE;
e79983f4
MM
2165
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2168
2169 return builtin_decl_explicit (fcode2);
272f51a3
JH
2170}
2171
b03ff92e 2172/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2173
2174tree
b03ff92e 2175mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2176{
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2178}
2179
b03ff92e
RS
2180/* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit array. */
2182
2183tree
2184mathfn_built_in (tree type, enum built_in_function fn)
2185{
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2187}
2188
686ee971
RS
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      /* Generate one case per directly-mapped internal function by
	 re-including internal-fn.def with these macro definitions.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* A few aliases that map onto differently-named internal fns.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is ldexp only when the radix of the return type is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2226
2227/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2234
2235internal_fn
2236replacement_internal_fn (gcall *call)
2237{
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2239 {
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2242 {
2243 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2246 return ifn;
2247 }
2248 }
2249 return IFN_LAST;
2250}
2251
1b1562a5
MM
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  /* Require exactly three floating-point arguments.  */
  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so the arguments can be
     re-expanded for the fallback library call without duplicating
     side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2326
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  /* Require exactly one floating-point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2427
44e10129
MM
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      /* ilogb may need to set errno, which inline expansion cannot do.  */
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2471
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      /* Remember the current insn so any partially emitted expansion
	 can be rolled back below.  */
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard any insns emitted above and restore
	 the original (un-SAVE_EXPRed) argument so the normal call path
	 sees the call unchanged.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2522
403e54f0 2523/* Expand a call to the builtin sincos math function.
5039610b 2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
403e54f0
RG
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2527
2528static rtx
2529expand_builtin_sincos (tree exp)
2530{
2531 rtx op0, op1, op2, target1, target2;
ef4bddc2 2532 machine_mode mode;
403e54f0
RG
2533 tree arg, sinp, cosp;
2534 int result;
db3927fb 2535 location_t loc = EXPR_LOCATION (exp);
ca818bd9 2536 tree alias_type, alias_off;
403e54f0 2537
5039610b
SL
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
403e54f0 2541
5039610b
SL
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
403e54f0
RG
2545
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2548
2549 /* Check if sincos insn is available, otherwise emit the call. */
947131ba 2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
403e54f0
RG
2551 return NULL_RTX;
2552
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2555
84217346 2556 op0 = expand_normal (arg);
ca818bd9
RG
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
403e54f0
RG
2563
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2568
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2573
2574 return const0_rtx;
2575}
2576
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2; as in expand_builtin_sincos, the
	 third operand receives the cos result (op2) and the fourth
	 the sin result (op1).  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi variant's type.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Stack temporaries for sincos to store through, and trees for
	 their addresses to pass as the pointer arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i), so build that complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex value with the cos
     result (op2) as the real part and the sin result (op1) as the
     imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2685
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Use the address of FNDECL as the callee expression.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  /* Build the CALL_EXPR directly from the va_list; no folding occurs.  */
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
44e10129 2704
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab and the plain FP rounding function to fall back
     to if the optab expansion fails.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Map the exact built-in variant to the matching libm name.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2842
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab; the i* variants additionally record the l*
     function to fall back to.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The l* fallback returns a long; convert to the mode the caller
	 expects for the i* built-in.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2944
5039610b 2945/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2949
2950static rtx
4359dc2a 2951expand_builtin_powi (tree exp, rtx target)
17684d46 2952{
17684d46
RG
2953 tree arg0, arg1;
2954 rtx op0, op1;
ef4bddc2
RS
2955 machine_mode mode;
2956 machine_mode mode2;
17684d46 2957
5039610b
SL
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
17684d46 2960
5039610b
SL
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2964
17684d46
RG
2965 /* Emit a libcall to libgcc. */
2966
5039610b 2967 /* Mode of the 2nd argument must match that of an int. */
f4b31647 2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 2969
17684d46
RG
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2972
4359dc2a 2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
49452c07 2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
17684d46 2979
8a33f100 2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 2981 target, LCT_CONST, mode,
0b8495ae 2982 op0, mode, op1, mode2);
17684d46
RG
2983
2984 return target;
2985}
2986
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  class expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the address computation in ahead of the strlen insns
     recorded at BEFORE_STRLEN above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3092
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  /* Case 1: the bound is a known constant.  */
  if (TREE_CODE (bound) == INTEGER_CST)
    {
      /* Diagnose a bound larger than the largest possible object.  */
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      bool exact = true;
      if (!len || TREE_CODE (len) != INTEGER_CST)
	{
	  /* Clear EXACT if LEN may be less than SRC suggests,
	     such as in
	       strnlen (&a[i], sizeof a)
	     where the value of i is unknown.  Unless i's value is
	     zero, the call is unsafe because the bound is greater. */
	  lendata.decl = unterminated_array (src, &len, &exact);
	  if (!lendata.decl)
	    return NULL_RTX;
	}

      /* Warn when the bound can read past an unterminated array.  */
      if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
	{
	  location_t warnloc
	    = expansion_point_location_if_in_system_header (loc);

	  if (!TREE_NO_WARNING (exp)
	      && warning_at (warnloc, OPT_Wstringop_overflow_,
			     exact
			     ? G_("%K%qD specified bound %E exceeds the size "
				  "%E of unterminated array")
			     : G_("%K%qD specified bound %E may exceed the "
				  "size of at most %E of unterminated array"),
			     exp, func, bound, len))
	    {
	      inform (DECL_SOURCE_LOCATION (lendata.decl),
		      "referenced argument declared here");
	      TREE_NO_WARNING (exp) = true;
	    }
	  return NULL_RTX;
	}

      if (!len)
	return NULL_RTX;

      /* strnlen (s, b) == min (strlen (s), b) for a known length.  */
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  /* Case 2: the bound is an SSA_NAME with a known value range.  */
  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  wide_int min, max;
  enum value_range_kind rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  bool exact = true;
  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  if (lendata.decl
      && !TREE_NO_WARNING (exp)
      && (wi::ltu_p (wi::to_wide (len), min)
	  || !exact))
    {
      location_t warnloc
	= expansion_point_location_if_in_system_header (loc);

      if (warning_at (warnloc, OPT_Wstringop_overflow_,
		      exact
		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
			   "the size %E of unterminated array")
		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
			   "the size of at most %E of unterminated array"),
		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
	{
	  inform (DECL_SOURCE_LOCATION (lendata.decl),
		  "referenced argument declared here");
	  TREE_NO_WARNING (exp) = true;
	}
    }

  /* An unterminated array is never expanded inline.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length the result
     is the length itself.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3228
57814e5e
JJ
3229/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3230 bytes from constant string DATA + OFFSET and return it as target
3231 constant. */
3232
3233static rtx
4682ae04 3234builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3235 scalar_int_mode mode)
57814e5e
JJ
3236{
3237 const char *str = (const char *) data;
3238
298e6adc
NS
3239 gcc_assert (offset >= 0
3240 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3241 <= strlen (str) + 1));
57814e5e
JJ
3242
3243 return c_readstr (str + offset, mode);
3244}
3245
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then
   we set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length fixes all three outputs.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds using value-range info.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3312
b825a228
MS
3313/* For an expression EXP issue an access warning controlled by option OPT
3314 with access to a region SLEN bytes in size in the RANGE of sizes. */
3315
3316static bool
3317warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
3318 tree slen, bool access)
3319{
3320 bool warned = false;
3321
3322 if (access)
3323 {
3324 if (tree_int_cst_equal (range[0], range[1]))
3325 warned = (func
3326 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3327 "%K%qD reading %E byte from a region of size %E",
3328 "%K%qD reading %E bytes from a region of size %E",
3329 exp, func, range[0], slen)
3330 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3331 "%Kreading %E byte from a region of size %E",
3332 "%Kreading %E bytes from a region of size %E",
3333 exp, range[0], slen));
3334 else if (tree_int_cst_sign_bit (range[1]))
3335 {
3336 /* Avoid printing the upper bound if it's invalid. */
3337 warned = (func
3338 ? warning_at (loc, opt,
3339 "%K%qD reading %E or more bytes from a region "
3340 "of size %E",
3341 exp, func, range[0], slen)
3342 : warning_at (loc, opt,
3343 "%Kreading %E or more bytes from a region "
3344 "of size %E",
3345 exp, range[0], slen));
3346 }
3347 else
3348 warned = (func
3349 ? warning_at (loc, opt,
3350 "%K%qD reading between %E and %E bytes from "
3351 "a region of size %E",
3352 exp, func, range[0], range[1], slen)
3353 : warning_at (loc, opt,
3354 "%Kreading between %E and %E bytes from "
3355 "a region of size %E",
3356 exp, range[0], range[1], slen));
3357
3358 return warned;
3359 }
3360
3361 if (tree_int_cst_equal (range[0], range[1]))
3362 warned = (func
3363 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3364 "%K%qD epecting %E byte in a region of size %E",
3365 "%K%qD expecting %E bytes in a region of size %E",
3366 exp, func, range[0], slen)
3367 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3368 "%Kexpecting %E byte in a region of size %E",
3369 "%Kexpecting %E bytes in a region of size %E",
3370 exp, range[0], slen));
3371 else if (tree_int_cst_sign_bit (range[1]))
3372 {
3373 /* Avoid printing the upper bound if it's invalid. */
3374 warned = (func
3375 ? warning_at (loc, opt,
3376 "%K%qD expecting %E or more bytes in a region "
3377 "of size %E",
3378 exp, func, range[0], slen)
3379 : warning_at (loc, opt,
3380 "%Kexpecting %E or more bytes in a region "
3381 "of size %E",
3382 exp, range[0], slen));
3383 }
3384 else
3385 warned = (func
3386 ? warning_at (loc, opt,
3387 "%K%qD expecting between %E and %E bytes in "
3388 "a region of size %E",
3389 exp, func, range[0], range[1], slen)
3390 : warning_at (loc, opt,
3391 "%Kexpectting between %E and %E bytes in "
3392 "a region of size %E",
3393 exp, range[0], range[1], slen));
3394 return warned;
3395}
3396
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
   DSTSIZE).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   ACCESS is true for accesses, false for simple size checks in calls
   to functions that neither read from nor write to the region.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize,
	      bool access /* = true */)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  /* Scratch range, reused for the various size/length queries below.  */
  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  c_strlen_data lendata = { };
	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
	  range[0] = lendata.minlen;
	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  if (dstwrite)
    get_size_range (dstwrite, range);

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (maxobjsize, range[0]))
    {
      /* TREE_NO_WARNING suppresses repeated diagnostics on the same call.  */
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      bool warned;
      if (range[0] == range[1])
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified size %E "
				"exceeds maximum object size %E",
				exp, func, range[0], maxobjsize)
		  : warning_at (loc, opt,
				"%Kspecified size %E "
				"exceeds maximum object size %E",
				exp, range[0], maxobjsize));
      else
	warned = (func
		  ? warning_at (loc, opt,
				"%K%qD specified size between %E and %E "
				"exceeds maximum object size %E",
				exp, func,
				range[0], range[1], maxobjsize)
		  : warning_at (loc, opt,
				"%Kspecified size between %E and %E "
				"exceeds maximum object size %E",
				exp, range[0], range[1], maxobjsize));
      if (warned)
	TREE_NO_WARNING (exp) = true;

      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  bool warned = false;
	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warned = (func
			? warning_at (loc, opt,
				      "%K%qD writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, func, range[0], dstsize)
			: warning_at (loc, opt,
				      "%Kwriting %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, range[0], dstsize));
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warned = (func
		      ? warning_n (loc, opt, tree_to_uhwi (range[0]),
				   "%K%qD writing %E byte into a region "
				   "of size %E overflows the destination",
				   "%K%qD writing %E bytes into a region "
				   "of size %E overflows the destination",
				   exp, func, range[0], dstsize)
		      : warning_n (loc, opt, tree_to_uhwi (range[0]),
				   "%Kwriting %E byte into a region "
				   "of size %E overflows the destination",
				   "%Kwriting %E bytes into a region "
				   "of size %E overflows the destination",
				   exp, range[0], dstsize));
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warned = (func
			? warning_at (loc, opt,
				      "%K%qD writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, func, range[0], dstsize)
			: warning_at (loc, opt,
				      "%Kwriting %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, range[0], dstsize));
	    }
	  else
	    warned = (func
		      ? warning_at (loc, opt,
				    "%K%qD writing between %E and %E bytes "
				    "into a region of size %E overflows "
				    "the destination",
				    exp, func, range[0], range[1],
				    dstsize)
		      : warning_at (loc, opt,
				    "%Kwriting between %E and %E bytes "
				    "into a region of size %E overflows "
				    "the destination",
				    exp, range[0], range[1],
				    dstsize));
	  if (warned)
	    TREE_NO_WARNING (exp) = true;

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      /* Note: RANGE is reused here; from this point on it describes
	 MAXREAD rather than DSTWRITE.  */
      get_size_range (maxread, range);
      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      bool warned = false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warned = (func
			  ? warning_at (loc, opt,
					"%K%qD specified bound %E "
					"exceeds maximum object size %E",
					exp, func, range[0], maxobjsize)
			  : warning_at (loc, opt,
					"%Kspecified bound %E "
					"exceeds maximum object size %E",
					exp, range[0], maxobjsize));
	      else
		warned = (func
			  ? warning_at (loc, opt,
					"%K%qD specified bound between "
					"%E and %E exceeds maximum object "
					"size %E",
					exp, func,
					range[0], range[1], maxobjsize)
			  : warning_at (loc, opt,
					"%Kspecified bound between "
					"%E and %E exceeds maximum object "
					"size %E",
					exp, range[0], range[1], maxobjsize));
	      if (warned)
		TREE_NO_WARNING (exp) = true;

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      bool warned = false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warned = (func
			  ? warning_at (loc, opt,
					"%K%qD specified bound %E "
					"exceeds destination size %E",
					exp, func,
					range[0], dstsize)
			  : warning_at (loc, opt,
					"%Kspecified bound %E "
					"exceeds destination size %E",
					exp, range[0], dstsize));
	      else
		warned = (func
			  ? warning_at (loc, opt,
					"%K%qD specified bound between %E "
					"and %E exceeds destination size %E",
					exp, func,
					range[0], range[1], dstsize)
			  : warning_at (loc, opt,
					"%Kspecified bound between %E "
					"and %E exceeds destination size %E",
					exp,
					range[0], range[1], dstsize));
	      if (warned)
		TREE_NO_WARNING (exp) = true;

	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  SLEN == SRCSTR means SRCSTR
     was an integer giving the precomputed size of the source object.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      if (warn_for_access (loc, func, exp, opt, range, slen, access))
	TREE_NO_WARNING (exp) = true;

      return false;
    }

  return true;
}
3751
/* If STMT is a call to an allocation function, returns the constant
   size of the object allocated by the call represented as sizetype.
   If nonnull, sets RNG1[] to the range of the size.
   RVALS, when nonnull, is passed through to get_range to query
   SSA_NAME value ranges.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			const vr_values *rvals /* = NULL */)
{
  if (!stmt)
    return NULL_TREE;

  /* Prefer the type of the called decl; for indirect calls fall back
     on the static type of the call.  */
  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* Indices of the size and (optional) count arguments; UINT_MAX
     means "not determined yet"/"absent".  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      /* __builtin_alloca_with_align takes the size as its first argument.  */
      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      /* alloc_size attribute operands are 1-based; convert to 0-based.  */
      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  if (!get_range (size, rng1, rvals))
    return NULL_TREE;

  /* No count argument (ARGIDX2 left at UINT_MAX) and a constant size:
     return the size directly.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  if (!get_range (n, rng2, rvals))
    return NULL_TREE;

  /* Extend to the maximum precision to avoid overflow.  */
  const int prec = ADDR_MAX_PRECISION;
  rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
  rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
  rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
  rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];
  tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
3843
ef29b12c
MS
3844/* Helper for compute_objsize. Returns the constant size of the DEST
3845 if it refers to a variable or field and sets *PDECL to the DECL and
3846 *POFF to zero. Otherwise returns null for other nodes. */
3847
3848static tree
3849addr_decl_size (tree dest, tree *pdecl, tree *poff)
3850{
3851 if (TREE_CODE (dest) == ADDR_EXPR)
3852 dest = TREE_OPERAND (dest, 0);
3853
3854 if (DECL_P (dest))
3855 {
3856 *pdecl = dest;
3857 *poff = integer_zero_node;
3858 if (tree size = DECL_SIZE_UNIT (dest))
3859 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3860 }
3861
3862 if (TREE_CODE (dest) == COMPONENT_REF)
3863 {
3864 *pdecl = TREE_OPERAND (dest, 1);
3865 *poff = integer_zero_node;
3866 /* Only return constant sizes for now while callers depend on it. */
3867 if (tree size = component_ref_size (dest))
3868 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3869 }
3870
3871 return NULL_TREE;
3872}
3873
ee92e7ba 3874/* Helper to compute the size of the object referenced by the DEST
025d57f0 3875 expression which must have pointer type, using Object Size type
ef29b12c
MS
3876 OSTYPE (only the least significant 2 bits are used).
3877 Returns an estimate of the size of the object represented as
3878 a sizetype constant if successful or NULL when the size cannot
3879 be determined.
3880 When the referenced object involves a non-constant offset in some
3881 range the returned value represents the largest size given the
3882 smallest non-negative offset in the range.
3883 If nonnull, sets *PDECL to the decl of the referenced subobject
3884 if it can be determined, or to null otherwise. Likewise, when
3885 POFF is nonnull *POFF is set to the offset into *PDECL.
3886
464969eb
MS
3887 The function is intended for diagnostics and should not be used
3888 to influence code generation or optimization. */
ee92e7ba 3889
025d57f0 3890tree
268209f3 3891compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
ef29b12c 3892 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
ee92e7ba 3893{
268209f3 3894 tree dummy_decl = NULL_TREE;
464969eb 3895 if (!pdecl)
268209f3
MS
3896 pdecl = &dummy_decl;
3897
ef29b12c 3898 tree dummy_off = NULL_TREE;
268209f3
MS
3899 if (!poff)
3900 poff = &dummy_off;
464969eb 3901
025d57f0
MS
3902 /* Only the two least significant bits are meaningful. */
3903 ostype &= 3;
3904
ef29b12c
MS
3905 if (ostype)
3906 /* Except for overly permissive calls to memcpy and other raw
3907 memory functions with zero OSTYPE, detect the size from simple
3908 DECLs first to more reliably than compute_builtin_object_size
3909 set *PDECL and *POFF. */
3910 if (tree size = addr_decl_size (dest, pdecl, poff))
3911 return size;
3912
3913 unsigned HOST_WIDE_INT size;
f7d86b5c 3914 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
ee92e7ba
MS
3915 return build_int_cst (sizetype, size);
3916
025d57f0
MS
3917 if (TREE_CODE (dest) == SSA_NAME)
3918 {
3919 gimple *stmt = SSA_NAME_DEF_STMT (dest);
268209f3
MS
3920 if (is_gimple_call (stmt))
3921 {
3922 /* If STMT is a call to an allocation function get the size
ef29b12c
MS
3923 from its argument(s). If successful, also set *PDECL to
3924 DEST for the caller to include in diagnostics. */
3925 if (tree size = gimple_call_alloc_size (stmt))
3926 {
3927 *pdecl = dest;
3928 *poff = integer_zero_node;
3929 return size;
3930 }
3931 return NULL_TREE;
268209f3
MS
3932 }
3933
025d57f0
MS
3934 if (!is_gimple_assign (stmt))
3935 return NULL_TREE;
3936
af3fa359
MS
3937 dest = gimple_assign_rhs1 (stmt);
3938
025d57f0 3939 tree_code code = gimple_assign_rhs_code (stmt);
af3fa359
MS
3940 if (code == POINTER_PLUS_EXPR)
3941 {
3942 /* compute_builtin_object_size fails for addresses with
3943 non-constant offsets. Try to determine the range of
e3329a78 3944 such an offset here and use it to adjust the constant
af3fa359
MS
3945 size. */
3946 tree off = gimple_assign_rhs2 (stmt);
e3329a78
MS
3947 if (TREE_CODE (off) == INTEGER_CST)
3948 {
268209f3 3949 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
e3329a78
MS
3950 {
3951 wide_int wioff = wi::to_wide (off);
3952 wide_int wisiz = wi::to_wide (size);
3953
3954 /* Ignore negative offsets for now. For others,
3955 use the lower bound as the most optimistic
3956 estimate of the (remaining) size. */
ef29b12c 3957 if (wi::neg_p (wioff))
e3329a78 3958 ;
e3329a78 3959 else
268209f3 3960 {
ef29b12c
MS
3961 if (*poff)
3962 {
3963 *poff = fold_convert (ptrdiff_type_node, *poff);
3964 off = fold_convert (ptrdiff_type_node, *poff);
3965 *poff = size_binop (PLUS_EXPR, *poff, off);
3966 }
3967 else
3968 *poff = off;
3969 if (wi::ltu_p (wioff, wisiz))
3970 return wide_int_to_tree (TREE_TYPE (size),
3971 wi::sub (wisiz, wioff));
268209f3
MS
3972 return size_zero_node;
3973 }
e3329a78
MS
3974 }
3975 }
3976 else if (TREE_CODE (off) == SSA_NAME
f05b3724 3977 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
af3fa359
MS
3978 {
3979 wide_int min, max;
54994253 3980 enum value_range_kind rng = get_range_info (off, &min, &max);
af3fa359
MS
3981
3982 if (rng == VR_RANGE)
ef29b12c
MS
3983 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3984 {
3985 wide_int wisiz = wi::to_wide (size);
3986
3987 /* Ignore negative offsets for now. For others,
3988 use the lower bound as the most optimistic
3989 estimate of the (remaining)size. */
3990 if (wi::neg_p (min) || wi::neg_p (max))
3991 ;
3992 else
3993 {
3994 /* FIXME: For now, since the offset is non-constant,
3995 clear *POFF to keep it from being "misused."
3996 Eventually *POFF will need to become a range that
3997 can be properly added to the outer offset if it
3998 too is one. */
3999 *poff = NULL_TREE;
4000 if (wi::ltu_p (min, wisiz))
268209f3
MS
4001 return wide_int_to_tree (TREE_TYPE (size),
4002 wi::sub (wisiz, min));
ef29b12c
MS
4003 return size_zero_node;
4004 }
4005 }
af3fa359
MS
4006 }
4007 }
4008 else if (code != ADDR_EXPR)
025d57f0 4009 return NULL_TREE;
025d57f0
MS
4010 }
4011
af3fa359
MS
4012 /* Unless computing the largest size (for memcpy and other raw memory
4013 functions), try to determine the size of the object from its type. */
4014 if (!ostype)
4015 return NULL_TREE;
4016
464969eb
MS
4017 if (TREE_CODE (dest) == ARRAY_REF
4018 || TREE_CODE (dest) == MEM_REF)
b631bdb3
MS
4019 {
4020 tree ref = TREE_OPERAND (dest, 0);
2b5d3dc2
MS
4021 tree reftype = TREE_TYPE (ref);
4022 if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
4023 {
4024 /* Give up for MEM_REFs of vector types; those may be synthesized
4025 from multiple assignments to consecutive data members. See PR
4026 93200.
4027 FIXME: Deal with this more generally, e.g., by marking up such
4028 MEM_REFs at the time they're created. */
4029 reftype = TREE_TYPE (reftype);
4030 if (TREE_CODE (reftype) == VECTOR_TYPE)
4031 return NULL_TREE;
4032 }
b631bdb3 4033 tree off = TREE_OPERAND (dest, 1);
268209f3 4034 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
b631bdb3 4035 {
464969eb
MS
4036 /* If the declaration of the destination object is known
4037 to have zero size, return zero. */
268209f3
MS
4038 if (integer_zerop (size)
4039 && *pdecl && DECL_P (*pdecl)
4040 && *poff && integer_zerop (*poff))
f7d86b5c 4041 return size_zero_node;
464969eb 4042
ef29b12c
MS
4043 /* A valid offset into a declared object cannot be negative.
4044 A zero size with a zero "inner" offset is still zero size
4045 regardless of the "other" offset OFF. */
4046 if (*poff
4047 && ((integer_zerop (*poff) && integer_zerop (size))
4048 || (TREE_CODE (*poff) == INTEGER_CST
4049 && tree_int_cst_sgn (*poff) < 0)))
268209f3 4050 return size_zero_node;
464969eb 4051
ef29b12c
MS
4052 wide_int offrng[2];
4053 if (!get_range (off, offrng, rvals))
4054 return NULL_TREE;
4055
4056 /* Convert to the same precision to keep wide_int from "helpfully"
4057 crashing whenever it sees other arguments. */
4058 const unsigned sizprec = TYPE_PRECISION (sizetype);
4059 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4060 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4061
268209f3
MS
4062 /* Adjust SIZE either up or down by the sum of *POFF and OFF
4063 above. */
464969eb
MS
4064 if (TREE_CODE (dest) == ARRAY_REF)
4065 {
12603635
MS
4066 tree lowbnd = array_ref_low_bound (dest);
4067 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4068 {
4069 /* Adjust the offset by the low bound of the array
4070 domain (normally zero but 1 in Fortran). */
4071 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4072 offrng[0] -= lb;
4073 offrng[1] -= lb;
4074 }
4075
268209f3 4076 /* Convert the array index into a byte offset. */
464969eb 4077 tree eltype = TREE_TYPE (dest);
f05b3724
JJ
4078 tree tpsize = TYPE_SIZE_UNIT (eltype);
4079 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
ef29b12c
MS
4080 {
4081 wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4082 offrng[0] *= wsz;
4083 offrng[1] *= wsz;
4084 }
464969eb
MS
4085 else
4086 return NULL_TREE;
4087 }
4088
ef29b12c
MS
4089 wide_int wisize = wi::to_wide (size);
4090
4091 if (!*poff)
268209f3 4092 {
ef29b12c
MS
4093 /* If the "inner" offset is unknown and the "outer" offset
4094 is either negative or less than SIZE, return the size
4095 minus the offset. This may be overly optimistic in
4096 the first case if the inner offset happens to be less
4097 than the absolute value of the outer offset. */
4098 if (wi::neg_p (offrng[0]))
4099 return size;
4100 if (wi::ltu_p (offrng[0], wisize))
4101 return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4102 return size_zero_node;
268209f3 4103 }
268209f3
MS
4104
4105 /* Convert to the same precision to keep wide_int from "helpfuly"
4106 crashing whenever it sees other argumments. */
ef29b12c
MS
4107 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4108 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
268209f3
MS
4109
4110 tree dstoff = *poff;
4111 if (integer_zerop (*poff))
4112 *poff = off;
4113 else if (!integer_zerop (off))
4114 {
4115 *poff = fold_convert (ptrdiff_type_node, *poff);
4116 off = fold_convert (ptrdiff_type_node, off);
4117 *poff = size_binop (PLUS_EXPR, *poff, off);
4118 }
4119
ef29b12c 4120 if (!wi::neg_p (offrng[0]))
268209f3
MS
4121 {
4122 if (TREE_CODE (size) != INTEGER_CST)
4123 return NULL_TREE;
4124
4125 /* Return the difference between the size and the offset
4126 or zero if the offset is greater. */
ef29b12c 4127 wide_int wisize = wi::to_wide (size, sizprec);
268209f3
MS
4128 if (wi::ltu_p (wisize, offrng[0]))
4129 return size_zero_node;
4130
4131 return wide_int_to_tree (sizetype, wisize - offrng[0]);
4132 }
4133
4134 wide_int dstoffrng[2];
4135 if (TREE_CODE (dstoff) == INTEGER_CST)
4136 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4137 else if (TREE_CODE (dstoff) == SSA_NAME)
4138 {
4139 enum value_range_kind rng
4140 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4141 if (rng != VR_RANGE)
4142 return NULL_TREE;
4143 }
4144 else
4145 return NULL_TREE;
4146
ef29b12c
MS
4147 dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4148 dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
268209f3 4149
ef29b12c
MS
4150 if (!wi::neg_p (dstoffrng[0]))
4151 wisize += dstoffrng[0];
268209f3
MS
4152
4153 offrng[1] += dstoffrng[1];
ef29b12c 4154 if (wi::neg_p (offrng[1]))
268209f3
MS
4155 return size_zero_node;
4156
ef29b12c 4157 return wide_int_to_tree (sizetype, wisize);
b631bdb3
MS
4158 }
4159
4160 return NULL_TREE;
4161 }
4162
ef29b12c
MS
4163 /* Try simple DECLs not handled above. */
4164 if (tree size = addr_decl_size (dest, pdecl, poff))
4165 return size;
464969eb 4166
025d57f0
MS
4167 tree type = TREE_TYPE (dest);
4168 if (TREE_CODE (type) == POINTER_TYPE)
4169 type = TREE_TYPE (type);
4170
4171 type = TYPE_MAIN_VARIANT (type);
268209f3
MS
4172 if (TREE_CODE (dest) == ADDR_EXPR)
4173 dest = TREE_OPERAND (dest, 0);
025d57f0
MS
4174
4175 if (TREE_CODE (type) == ARRAY_TYPE
268209f3 4176 && !array_at_struct_end_p (dest))
464969eb
MS
4177 {
4178 if (tree size = TYPE_SIZE_UNIT (type))
4179 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
025d57f0
MS
4180 }
4181
ee92e7ba
MS
4182 return NULL_TREE;
4183}
4184
4185/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
4186 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
4187 call expression, DEST is the destination argument, SRC is the source
4188 argument or null, and LEN is the number of bytes. Use Object Size type-0
4189 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
4190 (no overflow or invalid sizes), false otherwise. */
4191
4192static bool
cc8bea0a 4193check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 4194{
ee92e7ba 4195 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
4196 try to determine the size of the largest source and destination
4197 object using type-0 Object Size regardless of the object size
4198 type specified by the option. */
4199 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4200 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 4201
cc8bea0a
MS
4202 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4203 srcsize, dstsize);
d9c5a8b9
MS
4204}
4205
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX so the caller always emits the library call.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* The pointer to search and the number of bytes to examine; the
     character argument (arg 1) needs no checking here.  */
  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      /* SIZE is passed as the source-size bound: memchr only reads.  */
      check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
4230
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose overflows; the result is deliberately ignored since the
     expansion below proceeds either way.  */
  check_memop_access (exp, dest, src, len);

  /* memcpy returns DEST, hence RETURN_BEGIN; its arguments may not
     overlap, hence might_overlap is false.  */
  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, false);
}
57814e5e 4252
/* Expand a call EXP to the memmove built-in.  Diagnose buffer overflows
   and then try to expand the copy inline.  Return NULL_RTX if we failed,
   the caller should emit a normal call, otherwise the result, in TARGET
   if convenient.  */

static rtx
expand_builtin_memmove (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose overflows; the result is deliberately ignored.  */
  check_memop_access (exp, dest, src, len);

  /* memmove must tolerate overlapping buffers, hence might_overlap
     is true; like memcpy it returns DEST (RETURN_BEGIN).  */
  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
					  /*retmode=*/ RETURN_BEGIN, true);
}
4272
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  /* mempcpy returns DEST + LEN, hence RETURN_END.  */
  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*retmode=*/ RETURN_END);
}
4311
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expanding should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument: RETURN_BEGIN yields DEST,
   RETURN_END yields DEST + LEN, RETURN_END_MINUS_ONE yields
   DEST + LEN - 1.  MIGHT_OVERLAP is true for memmove-like semantics.
   EXP is the original call expression (used for tail-call and location
   information).  Returns NULL_RTX when the copy could not be expanded
   inline, in which case the caller should emit a library call.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to pick better alignment
     and size expectations for the block operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done by
     pieces, we can avoid loading the string from memory and only
     store the computed constants.  This works in the overlap
     (memmove) case as well because store_by_pieces just generates a
     series of stores of constants from the string constant returned
     by c_getstr().  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  A tail call is only usable when
     the result is either unused or is DEST itself (RETURN_BEGIN).  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* pc_rtx signals that the block move was emitted as a libcall whose
     return value already serves as the result.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* Adjust the returned pointer according to RETMODE.  */
  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
4422
/* Expand a mempcpy-style copy from broken-out arguments DEST, SRC and
   LEN (see expand_builtin_memory_copy_args for TARGET, ORIG_EXP and
   RETMODE).  mempcpy arguments may not overlap, hence might_overlap
   is false.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
4430
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Only targets providing the movstr standard pattern can expand this.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* The result is DEST itself: capture its address in a register
	 before the insn potentially clobbers the operand.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
4476
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* Detect unterminated source (only).  */
  if (!check_nul_terminated_array (exp, src))
    return NULL_RTX;

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* SRC is passed as both the size expression and the source bound so
     check_access can use its length for the diagnostic.  */
  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
		destsize);

  return NULL_RTX;
}
4507
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination before expanding.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
4541
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
{
  /* Detect strcpy calls with unterminated arrays.  */
  if (tree nonstr = unterminated_array (src))
    {
      /* NONSTR refers to the non-nul terminated constant array.  */
      if (!TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
      /* Punt to the library call rather than expanding an invalid copy.  */
      return NULL_RTX;
    }

  /* strcpy returns DEST, hence RETURN_BEGIN.  */
  return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
}
4562
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination before expanding.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src, NULL)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* LENDATA.DECL is set when the source is a non-nul-terminated
	 constant array; warn unless a warning was already suppressed.  */
      if (lendata.decl && !TREE_NO_WARNING (exp))
	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);

      /* Copy LEN + 1 bytes (including the terminating nul) and return
	 a pointer to the last byte copied, i.e. DEST + LEN.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to a strcpy-style expansion and compute the
		 stpcpy result as DEST + LEN by hand.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
4653
3ce4cdb2
MS
4654/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4655 arguments while being careful to avoid duplicate warnings (which could
4656 be issued if the expander were to expand the call, resulting in it
4657 being emitted in expand_call(). */
4658
4659static rtx
4660expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4661{
4662 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4663 {
4664 /* The call has been successfully expanded. Check for nonstring
4665 arguments and issue warnings as appropriate. */
4666 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4667 return ret;
4668 }
4669
4670 return NULL_RTX;
4671}
4672
/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);
  /* Diagnose reading past the end of an unterminated source array.  */
  if (!check_nul_terminated_array (exp, src, len))
    return NULL_RTX;

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
4700
57814e5e
JJ
4701/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4702 bytes from constant string DATA + OFFSET and return it as target
4703 constant. */
4704
14a43348 4705rtx
4682ae04 4706builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 4707 scalar_int_mode mode)
57814e5e
JJ
4708{
4709 const char *str = (const char *) data;
4710
4711 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4712 return const0_rtx;
4713
4714 return c_readstr (str + offset, mode);
4715}
4716
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  EXP is the call
   expression and OBJSIZE the destination size supplied by
   __strncat_chk, or null.  Returns true on success (no overflow or
   invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      /* Point the diagnostic at the user-visible location of the call.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* Use the smaller of MAXREAD and the source length as the number
     of bytes that will actually be appended.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4777
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Detect unterminated source (only).  */
  if (!check_nul_terminated_array (exp, src, maxread))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  Since the lengths are only used for warning and not
     for code generation disable strict mode below.  */
  tree maxlen = slen;
  if (!maxlen)
    {
      c_strlen_data lendata = { };
      get_range_strlen (src, &lendata, /* eltsize = */ 1);
      maxlen = lendata.maxbound;
    }

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (maxlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      /* Point the diagnostic at the user-visible location of the call.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* Use the smaller of MAXREAD and the source length as the number
     of bytes that will actually be appended.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4851
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose reading past the end of an unterminated source array.  */
  if (!check_nul_terminated_array (exp, src, len))
    return NULL_RTX;

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest,
				       warn_stringop_overflow - 1);

      /* The number of bytes to write is LEN but check_access will also
	 check SLEN if LEN's value isn't known.  */
      check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
		    destsize);
    }

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  /* SLEN now includes the terminating nul.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      /* builtin_strncpy_read_str supplies zeros past the nul, giving
	 the required zero padding for free.  */
      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
4919
ab937357
JJ
4920/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4921 bytes from constant string DATA + OFFSET and return it as target
4922 constant. */
4923
34d85166 4924rtx
4682ae04 4925builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4926 scalar_int_mode mode)
ab937357
JJ
4927{
4928 const char *c = (const char *) data;
f883e0a7 4929 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
4930
4931 memset (p, *c, GET_MODE_SIZE (mode));
4932
4933 return c_readstr (p, mode);
4934}
4935
1a887f86
RS
4936/* Callback routine for store_by_pieces. Return the RTL of a register
4937 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4938 char value given in the RTL register data. For example, if mode is
4939 4 bytes wide, return the RTL for 0x01010101*data. */
4940
4941static rtx
4682ae04 4942builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4943 scalar_int_mode mode)
1a887f86
RS
4944{
4945 rtx target, coeff;
4946 size_t size;
4947 char *p;
4948
4949 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
4950 if (size == 1)
4951 return (rtx) data;
1a887f86 4952
f883e0a7 4953 p = XALLOCAVEC (char, size);
1a887f86
RS
4954 memset (p, 1, size);
4955 coeff = c_readstr (p, mode);
4956
5ab2f7b7 4957 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
4958 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4959 return force_reg (mode, target);
4960}
4961
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose overflows (no source argument); the result is deliberately
     ignored since the expansion below proceeds either way.  */
  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
5039610b 4981}
28f4ec01 4982
5039610b
SL
4983/* Helper function to do the actual work for expand_builtin_memset. The
4984 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4985 so that this can also be called without constructing an actual CALL_EXPR.
4986 The other arguments and return value are the same as for
4987 expand_builtin_memset. */
880864cf 4988
5039610b
SL
4989static rtx
4990expand_builtin_memset_args (tree dest, tree val, tree len,
ef4bddc2 4991 rtx target, machine_mode mode, tree orig_exp)
5039610b
SL
4992{
4993 tree fndecl, fn;
4994 enum built_in_function fcode;
ef4bddc2 4995 machine_mode val_mode;
5039610b
SL
4996 char c;
4997 unsigned int dest_align;
4998 rtx dest_mem, dest_addr, len_rtx;
4999 HOST_WIDE_INT expected_size = -1;
5000 unsigned int expected_align = 0;
3918b108
JH
5001 unsigned HOST_WIDE_INT min_size;
5002 unsigned HOST_WIDE_INT max_size;
82bb7d4e 5003 unsigned HOST_WIDE_INT probable_max_size;
28f4ec01 5004
0eb77834 5005 dest_align = get_pointer_alignment (dest);
079a182e 5006
5039610b
SL
5007 /* If DEST is not a pointer type, don't do this operation in-line. */
5008 if (dest_align == 0)
5009 return NULL_RTX;
c2bd38e8 5010
a5883ba0
MM
5011 if (currently_expanding_gimple_stmt)
5012 stringop_block_profile (currently_expanding_gimple_stmt,
5013 &expected_align, &expected_size);
726a989a 5014
5039610b
SL
5015 if (expected_align < dest_align)
5016 expected_align = dest_align;
880864cf 5017
5039610b
SL
5018 /* If the LEN parameter is zero, return DEST. */
5019 if (integer_zerop (len))
5020 {
5021 /* Evaluate and ignore VAL in case it has side-effects. */
5022 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
5023 return expand_expr (dest, target, mode, EXPAND_NORMAL);
5024 }
57e84f18 5025
5039610b
SL
5026 /* Stabilize the arguments in case we fail. */
5027 dest = builtin_save_expr (dest);
5028 val = builtin_save_expr (val);
5029 len = builtin_save_expr (len);
1a887f86 5030
5039610b 5031 len_rtx = expand_normal (len);
82bb7d4e
JH
5032 determine_block_size (len, len_rtx, &min_size, &max_size,
5033 &probable_max_size);
5039610b 5034 dest_mem = get_memory_rtx (dest, len);
8a445129 5035 val_mode = TYPE_MODE (unsigned_char_type_node);
1a887f86 5036
5039610b
SL
5037 if (TREE_CODE (val) != INTEGER_CST)
5038 {
5039 rtx val_rtx;
1a887f86 5040
5039610b 5041 val_rtx = expand_normal (val);
8a445129 5042 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
28f4ec01 5043
5039610b
SL
5044 /* Assume that we can memset by pieces if we can store
5045 * the coefficients by pieces (in the required modes).
5046 * We can't pass builtin_memset_gen_str as that emits RTL. */
5047 c = 1;
cc269bb6 5048 if (tree_fits_uhwi_p (len)
ae7e9ddd 5049 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
5050 builtin_memset_read_str, &c, dest_align,
5051 true))
5039610b 5052 {
8a445129 5053 val_rtx = force_reg (val_mode, val_rtx);
ae7e9ddd 5054 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150 5055 builtin_memset_gen_str, val_rtx, dest_align,
2ff5ffb6 5056 true, RETURN_BEGIN);
5039610b
SL
5057 }
5058 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5059 dest_align, expected_align,
82bb7d4e
JH
5060 expected_size, min_size, max_size,
5061 probable_max_size))
880864cf 5062 goto do_libcall;
b8698a0f 5063
5039610b
SL
5064 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5065 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5066 return dest_mem;
5067 }
28f4ec01 5068
5039610b
SL
5069 if (target_char_cast (val, &c))
5070 goto do_libcall;
ab937357 5071
5039610b
SL
5072 if (c)
5073 {
cc269bb6 5074 if (tree_fits_uhwi_p (len)
ae7e9ddd 5075 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
5076 builtin_memset_read_str, &c, dest_align,
5077 true))
ae7e9ddd 5078 store_by_pieces (dest_mem, tree_to_uhwi (len),
2ff5ffb6
ML
5079 builtin_memset_read_str, &c, dest_align, true,
5080 RETURN_BEGIN);
8a445129
RS
5081 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5082 gen_int_mode (c, val_mode),
5039610b 5083 dest_align, expected_align,
82bb7d4e
JH
5084 expected_size, min_size, max_size,
5085 probable_max_size))
5039610b 5086 goto do_libcall;
b8698a0f 5087
5039610b
SL
5088 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5089 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5090 return dest_mem;
5091 }
ab937357 5092
5039610b
SL
5093 set_mem_align (dest_mem, dest_align);
5094 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5095 CALL_EXPR_TAILCALL (orig_exp)
5096 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3918b108 5097 expected_align, expected_size,
82bb7d4e
JH
5098 min_size, max_size,
5099 probable_max_size);
28f4ec01 5100
5039610b
SL
5101 if (dest_addr == 0)
5102 {
5103 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5104 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5105 }
28f4ec01 5106
5039610b 5107 return dest_addr;
880864cf 5108
5039610b
SL
5109 do_libcall:
5110 fndecl = get_callee_fndecl (orig_exp);
5111 fcode = DECL_FUNCTION_CODE (fndecl);
31db0fe0 5112 if (fcode == BUILT_IN_MEMSET)
aa493694
JJ
5113 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5114 dest, val, len);
5039610b 5115 else if (fcode == BUILT_IN_BZERO)
aa493694
JJ
5116 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5117 dest, len);
5039610b
SL
5118 else
5119 gcc_unreachable ();
44e10129
MM
5120 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5121 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5039610b 5122 return expand_call (fn, target, target == const0_rtx);
28f4ec01
BS
5123}
5124
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  /* bzero takes exactly (pointer, integer); bail out to a library call
     for any other signature.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* Diagnose out-of-bounds accesses (no source argument for bzero).  */
  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).   This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  /* Reuse the memset expander with a constant zero value; EXP is passed
     along so the libcall fallback can tell bzero from memset.  */
  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
5151
/* Try to expand cmpstr operation ICODE with the given operands.
   TARGET, if nonnull, is a pseudo to hold the comparison result;
   ARG1_RTX and ARG2_RTX are the two memory operands and ALIGN their
   common alignment.  Return the result rtx on success, otherwise
   return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  /* The pattern wants a fresh pseudo for its output; discard TARGET if it
     is not a (soft) register.  */
  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  /* Operands: 0 = result, 1/2 = the two memory blocks, 3 = alignment.  */
  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
5173
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  Check ARG1 first, then ARG2 only if ARG1 was in bounds.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len, /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  (bcmp only needs equality, so it is
     excluded from this path.)  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have a known byte alignment for both pointers, call the
     function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For an equality comparison either operand may play the role of the
     constant string; swap so the constant, if any, is ARG2.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
5284
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  /* Diagnose arguments that are not nul-terminated arrays; give up on
     expansion if either check fails.  */
  if (!check_nul_terminated_array (exp, arg1)
      || !check_nul_terminated_array (exp, arg2))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn pattern there is nothing more we
     can do inline.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have a known byte alignment for both pointers, call the
     function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Include the terminating nul in the compared lengths.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
28f4ec01 5405
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  /* Diagnose arguments that are not nul-terminated within the bound ARG3;
     give up on expansion if either check fails.  */
  if (!check_nul_terminated_array (exp, arg1, arg3)
      || !check_nul_terminated_array (exp, arg2, arg3))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* Include the terminating nul in the compared lengths.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
5522
/* Expand a call to __builtin_saveregs.  The register-save insns are
   emitted (once) at the start of the current function; the cached rtx
   for the result is returned on every subsequent call.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
5560
/* Expand a call to __builtin_next_arg.  Computes the address just past
   the last named argument: internal_arg_pointer + arg_offset_rtx.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
5573
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here; NEEDS_LVALUE is nonzero when the caller must be able to
   assign through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      /* Non-array va_list: stabilize an address and dereference it so
	 both rvalue and lvalue uses evaluate VALIST only once.  */
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
5623
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
5631
/* The "standard" abi va_list is va_list_type_node; FNDECL is ignored
   because the standard ABI does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
5639
/* The "standard" type of va_list is va_list_type_node.  Return it if
   TYPE matches (possibly after array-to-pointer decay), otherwise
   return NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
5668
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  /* Expand VALIST as a writable lvalue and store NEXTARG into it.  */
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
5678
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   errors are diagnosed rather than propagated.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse; a nonzero return means an
     error was already reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start its own way if it provides a hook.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
5707
/* Expand EXP, a call to __builtin_va_end.  va_end is a no-op here
   except for evaluating its argument's side effects.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
5722
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST must be an lvalue; SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5774
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call expression.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy a non-register, non-constant
	 address into a register before returning it.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5825
/* Expand EXP, a call to the alloca builtin (plain alloca, or the
   with-align / with-align-and-max variants).  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  /* Each variant takes a different number of integer arguments.  */
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit
	  ))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  /* Dynamic allocations for variables are recorded during gimplification.  */
  if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
    record_dynamic_alloc (exp);

  return result;
}
5894
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the dynamic stack offset in Pmode, then fold it into BOT
     in ptr_mode.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  /* Emit the runtime call __asan_allocas_unpoison (top, bot).  */
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5918
ac868f29
EB
5919/* Expand a call to bswap builtin in EXP.
5920 Return NULL_RTX if a normal call should be emitted rather than expanding the
5921 function in-line. If convenient, the result should be placed in TARGET.
5922 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
5923
5924static rtx
ef4bddc2 5925expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 5926 rtx subtarget)
167fa32c 5927{
167fa32c
EC
5928 tree arg;
5929 rtx op0;
5930
5039610b
SL
5931 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5932 return NULL_RTX;
167fa32c 5933
5039610b 5934 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
5935 op0 = expand_expr (arg,
5936 subtarget && GET_MODE (subtarget) == target_mode
5937 ? subtarget : NULL_RTX,
5938 target_mode, EXPAND_NORMAL);
5939 if (GET_MODE (op0) != target_mode)
5940 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 5941
ac868f29 5942 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
5943
5944 gcc_assert (target);
5945
ac868f29 5946 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
5947}
5948
5039610b
SL
5949/* Expand a call to a unary builtin in EXP.
5950 Return NULL_RTX if a normal call should be emitted rather than expanding the
28f4ec01
BS
5951 function in-line. If convenient, the result should be placed in TARGET.
5952 SUBTARGET may be used as the target for computing one of EXP's operands. */
d5457140 5953
28f4ec01 5954static rtx
ef4bddc2 5955expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4682ae04 5956 rtx subtarget, optab op_optab)
28f4ec01
BS
5957{
5958 rtx op0;
5039610b
SL
5959
5960 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5961 return NULL_RTX;
28f4ec01
BS
5962
5963 /* Compute the argument. */
4359dc2a
JJ
5964 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5965 (subtarget
5966 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5967 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
49452c07 5968 VOIDmode, EXPAND_NORMAL);
2928cd7a 5969 /* Compute op, into TARGET if possible.
28f4ec01 5970 Set TARGET to wherever the result comes back. */
5039610b 5971 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
146aef0b 5972 op_optab, op0, target, op_optab != clrsb_optab);
298e6adc 5973 gcc_assert (target);
5906d013 5974
6c537d03 5975 return convert_to_mode (target_mode, target, 0);
28f4ec01 5976}
994a57cd 5977
b8698a0f 5978/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
5979 as the builtin_expect semantic should've been already executed by
5980 tree branch prediction pass. */
994a57cd
RH
5981
5982static rtx
5039610b 5983expand_builtin_expect (tree exp, rtx target)
994a57cd 5984{
451409e4 5985 tree arg;
994a57cd 5986
5039610b 5987 if (call_expr_nargs (exp) < 2)
994a57cd 5988 return const0_rtx;
5039610b 5989 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 5990
5039610b 5991 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 5992 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 5993 gcc_assert (!flag_guess_branch_prob
1da2ed5f 5994 || optimize == 0 || seen_error ());
994a57cd
RH
5995 return target;
5996}
5f2d6cfa 5997
1e9168b2
ML
5998/* Expand a call to __builtin_expect_with_probability. We just return our
5999 argument as the builtin_expect semantic should've been already executed by
6000 tree branch prediction pass. */
6001
6002static rtx
6003expand_builtin_expect_with_probability (tree exp, rtx target)
6004{
6005 tree arg;
6006
6007 if (call_expr_nargs (exp) < 3)
6008 return const0_rtx;
6009 arg = CALL_EXPR_ARG (exp, 0);
6010
6011 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6012 /* When guessing was done, the hints should be already stripped away. */
6013 gcc_assert (!flag_guess_branch_prob
6014 || optimize == 0 || seen_error ());
6015 return target;
6016}
6017
6018
45d439ac
JJ
6019/* Expand a call to __builtin_assume_aligned. We just return our first
6020 argument as the builtin_assume_aligned semantic should've been already
6021 executed by CCP. */
6022
6023static rtx
6024expand_builtin_assume_aligned (tree exp, rtx target)
6025{
6026 if (call_expr_nargs (exp) < 2)
6027 return const0_rtx;
6028 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
6029 EXPAND_NORMAL);
6030 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
6031 && (call_expr_nargs (exp) < 3
6032 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
6033 return target;
6034}
6035
1e188d1e 6036void
4682ae04 6037expand_builtin_trap (void)
9602f5a0 6038{
eb6f47fb 6039 if (targetm.have_trap ())
206604dc 6040 {
eb6f47fb 6041 rtx_insn *insn = emit_insn (targetm.gen_trap ());
206604dc
JJ
6042 /* For trap insns when not accumulating outgoing args force
6043 REG_ARGS_SIZE note to prevent crossjumping of calls with
6044 different args sizes. */
6045 if (!ACCUMULATE_OUTGOING_ARGS)
68184180 6046 add_args_size_note (insn, stack_pointer_delta);
206604dc 6047 }
9602f5a0 6048 else
ee516de9
EB
6049 {
6050 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6051 tree call_expr = build_call_expr (fn, 0);
6052 expand_call (call_expr, NULL_RTX, false);
6053 }
6054
9602f5a0
RH
6055 emit_barrier ();
6056}
075ec276 6057
468059bc
DD
6058/* Expand a call to __builtin_unreachable. We do nothing except emit
6059 a barrier saying that control flow will not pass here.
6060
6061 It is the responsibility of the program being compiled to ensure
6062 that control flow does never reach __builtin_unreachable. */
6063static void
6064expand_builtin_unreachable (void)
6065{
6066 emit_barrier ();
6067}
6068
5039610b
SL
6069/* Expand EXP, a call to fabs, fabsf or fabsl.
6070 Return NULL_RTX if a normal call should be emitted rather than expanding
075ec276
RS
6071 the function inline. If convenient, the result should be placed
6072 in TARGET. SUBTARGET may be used as the target for computing
6073 the operand. */
6074
6075static rtx
5039610b 6076expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
075ec276 6077{
ef4bddc2 6078 machine_mode mode;
075ec276
RS
6079 tree arg;
6080 rtx op0;
6081
5039610b
SL
6082 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6083 return NULL_RTX;
075ec276 6084
5039610b 6085 arg = CALL_EXPR_ARG (exp, 0);
4cd8e76f 6086 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
075ec276 6087 mode = TYPE_MODE (TREE_TYPE (arg));
49452c07 6088 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
075ec276
RS
6089 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6090}
6091
5039610b 6092/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
6093 Return NULL is a normal call should be emitted rather than expanding the
6094 function inline. If convenient, the result should be placed in TARGET.
6095 SUBTARGET may be used as the target for computing the operand. */
6096
6097static rtx
5039610b 6098expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
6099{
6100 rtx op0, op1;
6101 tree arg;
6102
5039610b
SL
6103 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6104 return NULL_RTX;
046625fa 6105
5039610b 6106 arg = CALL_EXPR_ARG (exp, 0);
84217346 6107 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 6108
5039610b 6109 arg = CALL_EXPR_ARG (exp, 1);
84217346 6110 op1 = expand_normal (arg);
046625fa
RH
6111
6112 return expand_copysign (op0, op1, target);
6113}
6114
677feb77
DD
6115/* Expand a call to __builtin___clear_cache. */
6116
6117static rtx
f2cf13bd 6118expand_builtin___clear_cache (tree exp)
677feb77 6119{
f2cf13bd
RS
6120 if (!targetm.code_for_clear_cache)
6121 {
677feb77 6122#ifdef CLEAR_INSN_CACHE
f2cf13bd
RS
6123 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6124 does something. Just do the default expansion to a call to
6125 __clear_cache(). */
6126 return NULL_RTX;
677feb77 6127#else
f2cf13bd
RS
6128 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6129 does nothing. There is no need to call it. Do nothing. */
6130 return const0_rtx;
677feb77 6131#endif /* CLEAR_INSN_CACHE */
f2cf13bd
RS
6132 }
6133
677feb77
DD
6134 /* We have a "clear_cache" insn, and it will handle everything. */
6135 tree begin, end;
6136 rtx begin_rtx, end_rtx;
677feb77
DD
6137
6138 /* We must not expand to a library call. If we did, any
6139 fallback library function in libgcc that might contain a call to
6140 __builtin___clear_cache() would recurse infinitely. */
6141 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6142 {
6143 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6144 return const0_rtx;
6145 }
6146
f2cf13bd 6147 if (targetm.have_clear_cache ())
677feb77 6148 {
99b1c316 6149 class expand_operand ops[2];
677feb77
DD
6150
6151 begin = CALL_EXPR_ARG (exp, 0);
6152 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77
DD
6153
6154 end = CALL_EXPR_ARG (exp, 1);
6155 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 6156
a5c7d693
RS
6157 create_address_operand (&ops[0], begin_rtx);
6158 create_address_operand (&ops[1], end_rtx);
f2cf13bd 6159 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
a5c7d693 6160 return const0_rtx;
677feb77
DD
6161 }
6162 return const0_rtx;
677feb77
DD
6163}
6164
6de9cd9a
DN
6165/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6166
6167static rtx
6168round_trampoline_addr (rtx tramp)
6169{
6170 rtx temp, addend, mask;
6171
6172 /* If we don't need too much alignment, we'll have been guaranteed
6173 proper alignment by get_trampoline_type. */
6174 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6175 return tramp;
6176
6177 /* Round address up to desired boundary. */
6178 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
6179 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6180 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
6181
6182 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6183 temp, 0, OPTAB_LIB_WIDEN);
6184 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6185 temp, 0, OPTAB_LIB_WIDEN);
6186
6187 return tramp;
6188}
6189
6190static rtx
183dd130 6191expand_builtin_init_trampoline (tree exp, bool onstack)
6de9cd9a
DN
6192{
6193 tree t_tramp, t_func, t_chain;
531ca746 6194 rtx m_tramp, r_tramp, r_chain, tmp;
6de9cd9a 6195
5039610b 6196 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6de9cd9a
DN
6197 POINTER_TYPE, VOID_TYPE))
6198 return NULL_RTX;
6199
5039610b
SL
6200 t_tramp = CALL_EXPR_ARG (exp, 0);
6201 t_func = CALL_EXPR_ARG (exp, 1);
6202 t_chain = CALL_EXPR_ARG (exp, 2);
6de9cd9a 6203
84217346 6204 r_tramp = expand_normal (t_tramp);
531ca746
RH
6205 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6206 MEM_NOTRAP_P (m_tramp) = 1;
6207
183dd130
ILT
6208 /* If ONSTACK, the TRAMP argument should be the address of a field
6209 within the local function's FRAME decl. Either way, let's see if
6210 we can fill in the MEM_ATTRs for this memory. */
531ca746 6211 if (TREE_CODE (t_tramp) == ADDR_EXPR)
ad2e5b71 6212 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
531ca746 6213
183dd130
ILT
6214 /* Creator of a heap trampoline is responsible for making sure the
6215 address is aligned to at least STACK_BOUNDARY. Normally malloc
6216 will ensure this anyhow. */
531ca746
RH
6217 tmp = round_trampoline_addr (r_tramp);
6218 if (tmp != r_tramp)
6219 {
6220 m_tramp = change_address (m_tramp, BLKmode, tmp);
6221 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
f5541398 6222 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
531ca746
RH
6223 }
6224
6225 /* The FUNC argument should be the address of the nested function.
6226 Extract the actual function decl to pass to the hook. */
6227 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6228 t_func = TREE_OPERAND (t_func, 0);
6229 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6230
84217346 6231 r_chain = expand_normal (t_chain);
6de9cd9a
DN
6232
6233 /* Generate insns to initialize the trampoline. */
531ca746 6234 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6de9cd9a 6235
183dd130
ILT
6236 if (onstack)
6237 {
6238 trampolines_created = 1;
8ffadef9 6239
4c640e26
EB
6240 if (targetm.calls.custom_function_descriptors != 0)
6241 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6242 "trampoline generated for nested function %qD", t_func);
183dd130 6243 }
8ffadef9 6244
6de9cd9a
DN
6245 return const0_rtx;
6246}
6247
6248static rtx
5039610b 6249expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
6250{
6251 rtx tramp;
6252
5039610b 6253 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
6254 return NULL_RTX;
6255
5039610b 6256 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 6257 tramp = round_trampoline_addr (tramp);
531ca746
RH
6258 if (targetm.calls.trampoline_adjust_address)
6259 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
6260
6261 return tramp;
6262}
6263
4c640e26
EB
6264/* Expand a call to the builtin descriptor initialization routine.
6265 A descriptor is made up of a couple of pointers to the static
6266 chain and the code entry in this order. */
6267
6268static rtx
6269expand_builtin_init_descriptor (tree exp)
6270{
6271 tree t_descr, t_func, t_chain;
6272 rtx m_descr, r_descr, r_func, r_chain;
6273
6274 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6275 VOID_TYPE))
6276 return NULL_RTX;
6277
6278 t_descr = CALL_EXPR_ARG (exp, 0);
6279 t_func = CALL_EXPR_ARG (exp, 1);
6280 t_chain = CALL_EXPR_ARG (exp, 2);
6281
6282 r_descr = expand_normal (t_descr);
6283 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6284 MEM_NOTRAP_P (m_descr) = 1;
0bdf9f92 6285 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
4c640e26
EB
6286
6287 r_func = expand_normal (t_func);
6288 r_chain = expand_normal (t_chain);
6289
6290 /* Generate insns to initialize the descriptor. */
6291 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6292 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6293 POINTER_SIZE / BITS_PER_UNIT), r_func);
6294
6295 return const0_rtx;
6296}
6297
6298/* Expand a call to the builtin descriptor adjustment routine. */
6299
6300static rtx
6301expand_builtin_adjust_descriptor (tree exp)
6302{
6303 rtx tramp;
6304
6305 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6306 return NULL_RTX;
6307
6308 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6309
6310 /* Unalign the descriptor to allow runtime identification. */
6311 tramp = plus_constant (ptr_mode, tramp,
6312 targetm.calls.custom_function_descriptors);
6313
6314 return force_operand (tramp, NULL_RTX);
6315}
6316
0f67fa83
WG
6317/* Expand the call EXP to the built-in signbit, signbitf or signbitl
6318 function. The function first checks whether the back end provides
6319 an insn to implement signbit for the respective mode. If not, it
6320 checks whether the floating point format of the value is such that
61717a45
FXC
6321 the sign bit can be extracted. If that is not the case, error out.
6322 EXP is the expression that is a call to the builtin function; if
6323 convenient, the result should be placed in TARGET. */
ef79730c
RS
6324static rtx
6325expand_builtin_signbit (tree exp, rtx target)
6326{
6327 const struct real_format *fmt;
b5f2d801 6328 scalar_float_mode fmode;
095a2d76 6329 scalar_int_mode rmode, imode;
5039610b 6330 tree arg;
e4fbead1 6331 int word, bitpos;
d0c9d431 6332 enum insn_code icode;
ef79730c 6333 rtx temp;
db3927fb 6334 location_t loc = EXPR_LOCATION (exp);
ef79730c 6335
5039610b
SL
6336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6337 return NULL_RTX;
ef79730c 6338
5039610b 6339 arg = CALL_EXPR_ARG (exp, 0);
b5f2d801 6340 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
7a504f33 6341 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
ef79730c
RS
6342 fmt = REAL_MODE_FORMAT (fmode);
6343
0f67fa83
WG
6344 arg = builtin_save_expr (arg);
6345
6346 /* Expand the argument yielding a RTX expression. */
6347 temp = expand_normal (arg);
6348
6349 /* Check if the back end provides an insn that handles signbit for the
6350 argument's mode. */
947131ba 6351 icode = optab_handler (signbit_optab, fmode);
d0c9d431 6352 if (icode != CODE_FOR_nothing)
0f67fa83 6353 {
58f4cf2a 6354 rtx_insn *last = get_last_insn ();
0f67fa83 6355 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8a0b1aa4
MM
6356 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6357 return target;
6358 delete_insns_since (last);
0f67fa83
WG
6359 }
6360
ef79730c
RS
6361 /* For floating point formats without a sign bit, implement signbit
6362 as "ARG < 0.0". */
b87a0206 6363 bitpos = fmt->signbit_ro;
e4fbead1 6364 if (bitpos < 0)
ef79730c
RS
6365 {
6366 /* But we can't do this if the format supports signed zero. */
61717a45 6367 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
ef79730c 6368
db3927fb 6369 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
987b67bc 6370 build_real (TREE_TYPE (arg), dconst0));
ef79730c
RS
6371 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6372 }
6373
e4fbead1 6374 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
ef79730c 6375 {
304b9962 6376 imode = int_mode_for_mode (fmode).require ();
e4fbead1 6377 temp = gen_lowpart (imode, temp);
254878ea
RS
6378 }
6379 else
6380 {
e4fbead1
RS
6381 imode = word_mode;
6382 /* Handle targets with different FP word orders. */
6383 if (FLOAT_WORDS_BIG_ENDIAN)
c22cacf3 6384 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
e4fbead1 6385 else
c22cacf3 6386 word = bitpos / BITS_PER_WORD;
e4fbead1
RS
6387 temp = operand_subword_force (temp, word, fmode);
6388 bitpos = bitpos % BITS_PER_WORD;
6389 }
6390
210e1852
RS
6391 /* Force the intermediate word_mode (or narrower) result into a
6392 register. This avoids attempting to create paradoxical SUBREGs
6393 of floating point modes below. */
6394 temp = force_reg (imode, temp);
6395
e4fbead1
RS
6396 /* If the bitpos is within the "result mode" lowpart, the operation
6397 can be implement with a single bitwise AND. Otherwise, we need
6398 a right shift and an AND. */
6399
6400 if (bitpos < GET_MODE_BITSIZE (rmode))
6401 {
807e902e 6402 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
ef79730c 6403
515e442a 6404 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
e4fbead1 6405 temp = gen_lowpart (rmode, temp);
254878ea 6406 temp = expand_binop (rmode, and_optab, temp,
807e902e 6407 immed_wide_int_const (mask, rmode),
e4fbead1 6408 NULL_RTX, 1, OPTAB_LIB_WIDEN);
ef79730c 6409 }
e4fbead1
RS
6410 else
6411 {
6412 /* Perform a logical right shift to place the signbit in the least
c22cacf3 6413 significant bit, then truncate the result to the desired mode
e4fbead1 6414 and mask just this bit. */
eb6c3df1 6415 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
e4fbead1
RS
6416 temp = gen_lowpart (rmode, temp);
6417 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6418 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6419 }
6420
ef79730c
RS
6421 return temp;
6422}
d1c38823
ZD
6423
6424/* Expand fork or exec calls. TARGET is the desired target of the
5039610b 6425 call. EXP is the call. FN is the
d1c38823
ZD
6426 identificator of the actual function. IGNORE is nonzero if the
6427 value is to be ignored. */
6428
6429static rtx
5039610b 6430expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
d1c38823
ZD
6431{
6432 tree id, decl;
6433 tree call;
6434
b5338fb3
MS
6435 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6436 {
6437 /* Detect unterminated path. */
6438 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6439 return NULL_RTX;
6440
6441 /* Also detect unterminated first argument. */
6442 switch (DECL_FUNCTION_CODE (fn))
6443 {
6444 case BUILT_IN_EXECL:
6445 case BUILT_IN_EXECLE:
6446 case BUILT_IN_EXECLP:
6447 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6448 return NULL_RTX;
6449 default:
6450 break;
6451 }
6452 }
6453
6454
d1c38823
ZD
6455 /* If we are not profiling, just call the function. */
6456 if (!profile_arc_flag)
6457 return NULL_RTX;
6458
6459 /* Otherwise call the wrapper. This should be equivalent for the rest of
6460 compiler, so the code does not diverge, and the wrapper may run the
2b8a92de 6461 code necessary for keeping the profiling sane. */
d1c38823
ZD
6462
6463 switch (DECL_FUNCTION_CODE (fn))
6464 {
6465 case BUILT_IN_FORK:
6466 id = get_identifier ("__gcov_fork");
6467 break;
6468
6469 case BUILT_IN_EXECL:
6470 id = get_identifier ("__gcov_execl");
6471 break;
6472
6473 case BUILT_IN_EXECV:
6474 id = get_identifier ("__gcov_execv");
6475 break;
6476
6477 case BUILT_IN_EXECLP:
6478 id = get_identifier ("__gcov_execlp");
6479 break;
6480
6481 case BUILT_IN_EXECLE:
6482 id = get_identifier ("__gcov_execle");
6483 break;
6484
6485 case BUILT_IN_EXECVP:
6486 id = get_identifier ("__gcov_execvp");
6487 break;
6488
6489 case BUILT_IN_EXECVE:
6490 id = get_identifier ("__gcov_execve");
6491 break;
6492
6493 default:
298e6adc 6494 gcc_unreachable ();
d1c38823
ZD
6495 }
6496
c2255bc4
AH
6497 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6498 FUNCTION_DECL, id, TREE_TYPE (fn));
d1c38823
ZD
6499 DECL_EXTERNAL (decl) = 1;
6500 TREE_PUBLIC (decl) = 1;
6501 DECL_ARTIFICIAL (decl) = 1;
6502 TREE_NOTHROW (decl) = 1;
ac382b62
JM
6503 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6504 DECL_VISIBILITY_SPECIFIED (decl) = 1;
db3927fb 6505 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
d1c38823 6506 return expand_call (call, target, ignore);
5039610b 6507 }
b8698a0f 6508
48ae6c13
RH
6509
6510\f
02ee605c
RH
6511/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6512 the pointer in these functions is void*, the tree optimizers may remove
6513 casts. The mode computed in expand_builtin isn't reliable either, due
6514 to __sync_bool_compare_and_swap.
6515
6516 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6517 group of builtins. This gives us log2 of the mode size. */
6518
ef4bddc2 6519static inline machine_mode
02ee605c
RH
6520get_builtin_sync_mode (int fcode_diff)
6521{
2de0aa52
HPN
6522 /* The size is not negotiable, so ask not to get BLKmode in return
6523 if the target indicates that a smaller size would be better. */
f4b31647 6524 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
02ee605c
RH
6525}
6526
1387fef3
AS
6527/* Expand the memory expression LOC and return the appropriate memory operand
6528 for the builtin_sync operations. */
6529
6530static rtx
ef4bddc2 6531get_builtin_sync_mem (tree loc, machine_mode mode)
1387fef3
AS
6532{
6533 rtx addr, mem;
b6895597
AS
6534 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6535 ? TREE_TYPE (TREE_TYPE (loc))
6536 : TREE_TYPE (loc));
6537 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
1387fef3 6538
b6895597 6539 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
1413a419 6540 addr = convert_memory_address (addr_mode, addr);
1387fef3
AS
6541
6542 /* Note that we explicitly do not want any alias information for this
6543 memory, so that we kill all other live memories. Otherwise we don't
6544 satisfy the full barrier semantics of the intrinsic. */
b6895597
AS
6545 mem = gen_rtx_MEM (mode, addr);
6546
6547 set_mem_addr_space (mem, addr_space);
6548
6549 mem = validize_mem (mem);
1387fef3 6550
1be38ccb
RG
6551 /* The alignment needs to be at least according to that of the mode. */
6552 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
0eb77834 6553 get_pointer_alignment (loc)));
9cd9e512 6554 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
1387fef3
AS
6555 MEM_VOLATILE_P (mem) = 1;
6556
6557 return mem;
6558}
6559
86951993
AM
6560/* Make sure an argument is in the right mode.
6561 EXP is the tree argument.
6562 MODE is the mode it should be in. */
6563
6564static rtx
ef4bddc2 6565expand_expr_force_mode (tree exp, machine_mode mode)
86951993
AM
6566{
6567 rtx val;
ef4bddc2 6568 machine_mode old_mode;
86951993
AM
6569
6570 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6571 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6572 of CONST_INTs, where we know the old_mode only from the call argument. */
6573
6574 old_mode = GET_MODE (val);
6575 if (old_mode == VOIDmode)
6576 old_mode = TYPE_MODE (TREE_TYPE (exp));
6577 val = convert_modes (mode, old_mode, val, 1);
6578 return val;
6579}
6580
6581
48ae6c13 6582/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5039610b 6583 EXP is the CALL_EXPR. CODE is the rtx code
48ae6c13
RH
6584 that corresponds to the arithmetic or logical operation from the name;
6585 an exception here is that NOT actually means NAND. TARGET is an optional
6586 place for us to store the results; AFTER is true if this is the
86951993 6587 fetch_and_xxx form. */
48ae6c13
RH
6588
6589static rtx
ef4bddc2 6590expand_builtin_sync_operation (machine_mode mode, tree exp,
02ee605c 6591 enum rtx_code code, bool after,
86951993 6592 rtx target)
48ae6c13 6593{
1387fef3 6594 rtx val, mem;
c2255bc4 6595 location_t loc = EXPR_LOCATION (exp);
48ae6c13 6596
23462d4d
UB
6597 if (code == NOT && warn_sync_nand)
6598 {
6599 tree fndecl = get_callee_fndecl (exp);
6600 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6601
6602 static bool warned_f_a_n, warned_n_a_f;
6603
6604 switch (fcode)
6605 {
e0a8ecf2
AM
6606 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6607 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6608 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6609 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6610 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
23462d4d
UB
6611 if (warned_f_a_n)
6612 break;
6613
e79983f4 6614 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
c2255bc4 6615 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
6616 warned_f_a_n = true;
6617 break;
6618
e0a8ecf2
AM
6619 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6620 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6621 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6622 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6623 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
23462d4d
UB
6624 if (warned_n_a_f)
6625 break;
6626
e79983f4 6627 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
c2255bc4 6628 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
6629 warned_n_a_f = true;
6630 break;
6631
6632 default:
6633 gcc_unreachable ();
6634 }
6635 }
6636
48ae6c13 6637 /* Expand the operands. */
5039610b 6638 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993 6639 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
48ae6c13 6640
46b35980 6641 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
86951993 6642 after);
48ae6c13
RH
6643}
6644
6645/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 6646 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
6647 true if this is the boolean form. TARGET is a place for us to store the
6648 results; this is NOT optional if IS_BOOL is true. */
6649
6650static rtx
ef4bddc2 6651expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 6652 bool is_bool, rtx target)
48ae6c13 6653{
1387fef3 6654 rtx old_val, new_val, mem;
f0409b19 6655 rtx *pbool, *poval;
48ae6c13
RH
6656
6657 /* Expand the operands. */
5039610b 6658 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6659 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6660 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 6661
f0409b19
RH
6662 pbool = poval = NULL;
6663 if (target != const0_rtx)
6664 {
6665 if (is_bool)
6666 pbool = &target;
6667 else
6668 poval = &target;
6669 }
6670 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
6671 false, MEMMODEL_SYNC_SEQ_CST,
6672 MEMMODEL_SYNC_SEQ_CST))
86951993 6673 return NULL_RTX;
5039610b 6674
86951993 6675 return target;
48ae6c13
RH
6676}
6677
6678/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6679 general form is actually an atomic exchange, and some targets only
6680 support a reduced form with the second argument being a constant 1.
b8698a0f 6681 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 6682 the results. */
48ae6c13
RH
6683
6684static rtx
ef4bddc2 6685expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 6686 rtx target)
48ae6c13 6687{
1387fef3 6688 rtx val, mem;
48ae6c13
RH
6689
6690 /* Expand the operands. */
5039610b 6691 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
6692 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6693
744accb2 6694 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
6695}
6696
6697/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6698
6699static void
ef4bddc2 6700expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
6701{
6702 rtx mem;
6703
6704 /* Expand the operands. */
6705 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6706
46b35980 6707 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
6708}
6709
6710/* Given an integer representing an ``enum memmodel'', verify its
6711 correctness and return the memory model enum. */
6712
6713static enum memmodel
6714get_memmodel (tree exp)
6715{
6716 rtx op;
5dcfdccd 6717 unsigned HOST_WIDE_INT val;
620e594b 6718 location_t loc
8d9fdb49 6719 = expansion_point_location_if_in_system_header (input_location);
86951993
AM
6720
6721 /* If the parameter is not a constant, it's a run time value so we'll just
6722 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6723 if (TREE_CODE (exp) != INTEGER_CST)
6724 return MEMMODEL_SEQ_CST;
6725
6726 op = expand_normal (exp);
5dcfdccd
KY
6727
6728 val = INTVAL (op);
6729 if (targetm.memmodel_check)
6730 val = targetm.memmodel_check (val);
6731 else if (val & ~MEMMODEL_MASK)
6732 {
8d9fdb49
MP
6733 warning_at (loc, OPT_Winvalid_memory_model,
6734 "unknown architecture specifier in memory model to builtin");
5dcfdccd
KY
6735 return MEMMODEL_SEQ_CST;
6736 }
6737
46b35980
AM
6738 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
6739 if (memmodel_base (val) >= MEMMODEL_LAST)
86951993 6740 {
8d9fdb49
MP
6741 warning_at (loc, OPT_Winvalid_memory_model,
6742 "invalid memory model argument to builtin");
86951993
AM
6743 return MEMMODEL_SEQ_CST;
6744 }
5dcfdccd 6745
8673b671
AM
6746 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6747 be conservative and promote consume to acquire. */
6748 if (val == MEMMODEL_CONSUME)
6749 val = MEMMODEL_ACQUIRE;
6750
5dcfdccd 6751 return (enum memmodel) val;
86951993
AM
6752}
6753
6754/* Expand the __atomic_exchange intrinsic:
6755 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6756 EXP is the CALL_EXPR.
6757 TARGET is an optional place for us to store the results. */
6758
6759static rtx
ef4bddc2 6760expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
6761{
6762 rtx val, mem;
6763 enum memmodel model;
6764
6765 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
6766
6767 if (!flag_inline_atomics)
6768 return NULL_RTX;
6769
6770 /* Expand the operands. */
6771 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6772 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6773
744accb2 6774 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
6775}
6776
6777/* Expand the __atomic_compare_exchange intrinsic:
6778 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6779 TYPE desired, BOOL weak,
6780 enum memmodel success,
6781 enum memmodel failure)
6782 EXP is the CALL_EXPR.
6783 TARGET is an optional place for us to store the results. */
6784
6785static rtx
ef4bddc2 6786expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
86951993
AM
6787 rtx target)
6788{
58f4cf2a
DM
6789 rtx expect, desired, mem, oldval;
6790 rtx_code_label *label;
86951993
AM
6791 enum memmodel success, failure;
6792 tree weak;
6793 bool is_weak;
620e594b 6794 location_t loc
8d9fdb49 6795 = expansion_point_location_if_in_system_header (input_location);
86951993
AM
6796
6797 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6798 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6799
77df5327
AM
6800 if (failure > success)
6801 {
8d9fdb49
MP
6802 warning_at (loc, OPT_Winvalid_memory_model,
6803 "failure memory model cannot be stronger than success "
6804 "memory model for %<__atomic_compare_exchange%>");
77df5327
AM
6805 success = MEMMODEL_SEQ_CST;
6806 }
6807
46b35980 6808 if (is_mm_release (failure) || is_mm_acq_rel (failure))
86951993 6809 {
8d9fdb49
MP
6810 warning_at (loc, OPT_Winvalid_memory_model,
6811 "invalid failure memory model for "
6812 "%<__atomic_compare_exchange%>");
77df5327
AM
6813 failure = MEMMODEL_SEQ_CST;
6814 success = MEMMODEL_SEQ_CST;
86951993
AM
6815 }
6816
77df5327 6817
86951993
AM
6818 if (!flag_inline_atomics)
6819 return NULL_RTX;
6820
6821 /* Expand the operands. */
6822 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6823
6824 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6825 expect = convert_memory_address (Pmode, expect);
215770ad 6826 expect = gen_rtx_MEM (mode, expect);
86951993
AM
6827 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6828
6829 weak = CALL_EXPR_ARG (exp, 3);
6830 is_weak = false;
9439e9a1 6831 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
86951993
AM
6832 is_weak = true;
6833
672ce939
RH
6834 if (target == const0_rtx)
6835 target = NULL;
672ce939 6836
2fdc29e8
RH
6837 /* Lest the rtl backend create a race condition with an imporoper store
6838 to memory, always create a new pseudo for OLDVAL. */
6839 oldval = NULL;
6840
6841 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
f0409b19 6842 is_weak, success, failure))
86951993
AM
6843 return NULL_RTX;
6844
672ce939
RH
6845 /* Conditionally store back to EXPECT, lest we create a race condition
6846 with an improper store to memory. */
6847 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6848 the normal case where EXPECT is totally private, i.e. a register. At
6849 which point the store can be unconditional. */
6850 label = gen_label_rtx ();
f8940d4a
JG
6851 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6852 GET_MODE (target), 1, label);
672ce939
RH
6853 emit_move_insn (expect, oldval);
6854 emit_label (label);
215770ad 6855
86951993
AM
6856 return target;
6857}
6858
849a76a5
JJ
6859/* Helper function for expand_ifn_atomic_compare_exchange - expand
6860 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6861 call. The weak parameter must be dropped to match the expected parameter
6862 list and the expected argument changed from value to pointer to memory
6863 slot. */
6864
6865static void
6866expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6867{
6868 unsigned int z;
6869 vec<tree, va_gc> *vec;
6870
6871 vec_alloc (vec, 5);
6872 vec->quick_push (gimple_call_arg (call, 0));
6873 tree expected = gimple_call_arg (call, 1);
6874 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6875 TREE_TYPE (expected));
6876 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6877 if (expd != x)
6878 emit_move_insn (x, expd);
6879 tree v = make_tree (TREE_TYPE (expected), x);
6880 vec->quick_push (build1 (ADDR_EXPR,
6881 build_pointer_type (TREE_TYPE (expected)), v));
6882 vec->quick_push (gimple_call_arg (call, 2));
6883 /* Skip the boolean weak parameter. */
6884 for (z = 4; z < 6; z++)
6885 vec->quick_push (gimple_call_arg (call, z));
4871e1ed 6886 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
cf098191 6887 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
4871e1ed 6888 gcc_assert (bytes_log2 < 5);
849a76a5
JJ
6889 built_in_function fncode
6890 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
4871e1ed 6891 + bytes_log2);
849a76a5
JJ
6892 tree fndecl = builtin_decl_explicit (fncode);
6893 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6894 fndecl);
6895 tree exp = build_call_vec (boolean_type_node, fn, vec);
6896 tree lhs = gimple_call_lhs (call);
6897 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6898 if (lhs)
6899 {
6900 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6901 if (GET_MODE (boolret) != mode)
6902 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6903 x = force_reg (mode, x);
6904 write_complex_part (target, boolret, true);
6905 write_complex_part (target, x, false);
6906 }
6907}
6908
6909/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6910
6911void
6912expand_ifn_atomic_compare_exchange (gcall *call)
6913{
6914 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6915 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
f4b31647 6916 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
849a76a5
JJ
6917 rtx expect, desired, mem, oldval, boolret;
6918 enum memmodel success, failure;
6919 tree lhs;
6920 bool is_weak;
620e594b 6921 location_t loc
849a76a5
JJ
6922 = expansion_point_location_if_in_system_header (gimple_location (call));
6923
6924 success = get_memmodel (gimple_call_arg (call, 4));
6925 failure = get_memmodel (gimple_call_arg (call, 5));
6926
6927 if (failure > success)
6928 {
6929 warning_at (loc, OPT_Winvalid_memory_model,
6930 "failure memory model cannot be stronger than success "
6931 "memory model for %<__atomic_compare_exchange%>");
6932 success = MEMMODEL_SEQ_CST;
6933 }
6934
6935 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6936 {
6937 warning_at (loc, OPT_Winvalid_memory_model,
6938 "invalid failure memory model for "
6939 "%<__atomic_compare_exchange%>");
6940 failure = MEMMODEL_SEQ_CST;
6941 success = MEMMODEL_SEQ_CST;
6942 }
6943
6944 if (!flag_inline_atomics)
6945 {
6946 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6947 return;
6948 }
6949
6950 /* Expand the operands. */
6951 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6952
6953 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6954 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6955
6956 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6957
6958 boolret = NULL;
6959 oldval = NULL;
6960
6961 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6962 is_weak, success, failure))
6963 {
6964 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6965 return;
6966 }
6967
6968 lhs = gimple_call_lhs (call);
6969 if (lhs)
6970 {
6971 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6972 if (GET_MODE (boolret) != mode)
6973 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6974 write_complex_part (target, boolret, true);
6975 write_complex_part (target, oldval, false);
6976 }
6977}
6978
86951993
AM
6979/* Expand the __atomic_load intrinsic:
6980 TYPE __atomic_load (TYPE *object, enum memmodel)
6981 EXP is the CALL_EXPR.
6982 TARGET is an optional place for us to store the results. */
6983
6984static rtx
ef4bddc2 6985expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
86951993
AM
6986{
6987 rtx mem;
6988 enum memmodel model;
6989
6990 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
46b35980 6991 if (is_mm_release (model) || is_mm_acq_rel (model))
86951993 6992 {
620e594b 6993 location_t loc
8d9fdb49
MP
6994 = expansion_point_location_if_in_system_header (input_location);
6995 warning_at (loc, OPT_Winvalid_memory_model,
6996 "invalid memory model for %<__atomic_load%>");
77df5327 6997 model = MEMMODEL_SEQ_CST;
86951993
AM
6998 }
6999
7000 if (!flag_inline_atomics)
7001 return NULL_RTX;
7002
7003 /* Expand the operand. */
7004 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7005
7006 return expand_atomic_load (target, mem, model);
7007}
7008
7009
7010/* Expand the __atomic_store intrinsic:
7011 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
7012 EXP is the CALL_EXPR.
7013 TARGET is an optional place for us to store the results. */
7014
7015static rtx
ef4bddc2 7016expand_builtin_atomic_store (machine_mode mode, tree exp)
86951993
AM
7017{
7018 rtx mem, val;
7019 enum memmodel model;
7020
7021 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
46b35980
AM
7022 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
7023 || is_mm_release (model)))
86951993 7024 {
620e594b 7025 location_t loc
8d9fdb49
MP
7026 = expansion_point_location_if_in_system_header (input_location);
7027 warning_at (loc, OPT_Winvalid_memory_model,
7028 "invalid memory model for %<__atomic_store%>");
77df5327 7029 model = MEMMODEL_SEQ_CST;
86951993
AM
7030 }
7031
7032 if (!flag_inline_atomics)
7033 return NULL_RTX;
7034
7035 /* Expand the operands. */
7036 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7037 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7038
0669295b 7039 return expand_atomic_store (mem, val, model, false);
86951993
AM
7040}
7041
7042/* Expand the __atomic_fetch_XXX intrinsic:
7043 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
7044 EXP is the CALL_EXPR.
7045 TARGET is an optional place for us to store the results.
7046 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
7047 FETCH_AFTER is true if returning the result of the operation.
7048 FETCH_AFTER is false if returning the value before the operation.
7049 IGNORE is true if the result is not used.
7050 EXT_CALL is the correct builtin for an external call if this cannot be
7051 resolved to an instruction sequence. */
7052
7053static rtx
ef4bddc2 7054expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
86951993
AM
7055 enum rtx_code code, bool fetch_after,
7056 bool ignore, enum built_in_function ext_call)
7057{
7058 rtx val, mem, ret;
7059 enum memmodel model;
7060 tree fndecl;
7061 tree addr;
7062
7063 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7064
7065 /* Expand the operands. */
7066 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7067 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7068
7069 /* Only try generating instructions if inlining is turned on. */
7070 if (flag_inline_atomics)
7071 {
7072 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7073 if (ret)
7074 return ret;
7075 }
7076
7077 /* Return if a different routine isn't needed for the library call. */
7078 if (ext_call == BUILT_IN_NONE)
7079 return NULL_RTX;
7080
7081 /* Change the call to the specified function. */
7082 fndecl = get_callee_fndecl (exp);
7083 addr = CALL_EXPR_FN (exp);
7084 STRIP_NOPS (addr);
7085
7086 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
c3284718 7087 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
86951993 7088
67914693 7089 /* If we will emit code after the call, the call cannot be a tail call.
08c273bb
SB
7090 If it is emitted as a tail call, a barrier is emitted after it, and
7091 then all trailing code is removed. */
7092 if (!ignore)
7093 CALL_EXPR_TAILCALL (exp) = 0;
7094
86951993
AM
7095 /* Expand the call here so we can emit trailing code. */
7096 ret = expand_call (exp, target, ignore);
7097
7098 /* Replace the original function just in case it matters. */
7099 TREE_OPERAND (addr, 0) = fndecl;
7100
7101 /* Then issue the arithmetic correction to return the right result. */
7102 if (!ignore)
154b68db
AM
7103 {
7104 if (code == NOT)
7105 {
7106 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7107 OPTAB_LIB_WIDEN);
7108 ret = expand_simple_unop (mode, NOT, ret, target, true);
7109 }
7110 else
7111 ret = expand_simple_binop (mode, code, ret, val, target, true,
7112 OPTAB_LIB_WIDEN);
7113 }
86951993
AM
7114 return ret;
7115}
7116
adedd5c1
JJ
7117/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7118
7119void
7120expand_ifn_atomic_bit_test_and (gcall *call)
7121{
7122 tree ptr = gimple_call_arg (call, 0);
7123 tree bit = gimple_call_arg (call, 1);
7124 tree flag = gimple_call_arg (call, 2);
7125 tree lhs = gimple_call_lhs (call);
7126 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7127 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7128 enum rtx_code code;
7129 optab optab;
99b1c316 7130 class expand_operand ops[5];
adedd5c1
JJ
7131
7132 gcc_assert (flag_inline_atomics);
7133
7134 if (gimple_call_num_args (call) == 4)
7135 model = get_memmodel (gimple_call_arg (call, 3));
7136
7137 rtx mem = get_builtin_sync_mem (ptr, mode);
7138 rtx val = expand_expr_force_mode (bit, mode);
7139
7140 switch (gimple_call_internal_fn (call))
7141 {
7142 case IFN_ATOMIC_BIT_TEST_AND_SET:
7143 code = IOR;
7144 optab = atomic_bit_test_and_set_optab;
7145 break;
7146 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7147 code = XOR;
7148 optab = atomic_bit_test_and_complement_optab;
7149 break;
7150 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7151 code = AND;
7152 optab = atomic_bit_test_and_reset_optab;
7153 break;
7154 default:
7155 gcc_unreachable ();
7156 }
7157
7158 if (lhs == NULL_TREE)
7159 {
7160 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7161 val, NULL_RTX, true, OPTAB_DIRECT);
7162 if (code == AND)
7163 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7164 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7165 return;
7166 }
7167
7168 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7169 enum insn_code icode = direct_optab_handler (optab, mode);
7170 gcc_assert (icode != CODE_FOR_nothing);
7171 create_output_operand (&ops[0], target, mode);
7172 create_fixed_operand (&ops[1], mem);
7173 create_convert_operand_to (&ops[2], val, mode, true);
7174 create_integer_operand (&ops[3], model);
7175 create_integer_operand (&ops[4], integer_onep (flag));
7176 if (maybe_expand_insn (icode, 5, ops))
7177 return;
7178
7179 rtx bitval = val;
7180 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7181 val, NULL_RTX, true, OPTAB_DIRECT);
7182 rtx maskval = val;
7183 if (code == AND)
7184 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7185 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7186 code, model, false);
7187 if (integer_onep (flag))
7188 {
7189 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7190 NULL_RTX, true, OPTAB_DIRECT);
7191 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7192 true, OPTAB_DIRECT);
7193 }
7194 else
7195 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7196 OPTAB_DIRECT);
7197 if (result != target)
7198 emit_move_insn (target, result);
7199}
7200
d660c35e
AM
7201/* Expand an atomic clear operation.
7202 void _atomic_clear (BOOL *obj, enum memmodel)
7203 EXP is the call expression. */
7204
7205static rtx
7206expand_builtin_atomic_clear (tree exp)
7207{
ef4bddc2 7208 machine_mode mode;
d660c35e
AM
7209 rtx mem, ret;
7210 enum memmodel model;
7211
f4b31647 7212 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7213 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7214 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7215
46b35980 7216 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 7217 {
620e594b 7218 location_t loc
8d9fdb49
MP
7219 = expansion_point_location_if_in_system_header (input_location);
7220 warning_at (loc, OPT_Winvalid_memory_model,
7221 "invalid memory model for %<__atomic_store%>");
77df5327 7222 model = MEMMODEL_SEQ_CST;
d660c35e
AM
7223 }
7224
7225 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7226 Failing that, a store is issued by __atomic_store. The only way this can
7227 fail is if the bool type is larger than a word size. Unlikely, but
7228 handle it anyway for completeness. Assume a single threaded model since
7229 there is no atomic support in this case, and no barriers are required. */
7230 ret = expand_atomic_store (mem, const0_rtx, model, true);
7231 if (!ret)
7232 emit_move_insn (mem, const0_rtx);
7233 return const0_rtx;
7234}
7235
7236/* Expand an atomic test_and_set operation.
7237 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
7238 EXP is the call expression. */
7239
7240static rtx
744accb2 7241expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 7242{
744accb2 7243 rtx mem;
d660c35e 7244 enum memmodel model;
ef4bddc2 7245 machine_mode mode;
d660c35e 7246
f4b31647 7247 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
7248 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7249 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7250
744accb2 7251 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
7252}
7253
7254
86951993
AM
7255/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7256 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7257
7258static tree
7259fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7260{
7261 int size;
ef4bddc2 7262 machine_mode mode;
86951993
AM
7263 unsigned int mode_align, type_align;
7264
7265 if (TREE_CODE (arg0) != INTEGER_CST)
7266 return NULL_TREE;
48ae6c13 7267
f4b31647 7268 /* We need a corresponding integer mode for the access to be lock-free. */
86951993 7269 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
f4b31647
RS
7270 if (!int_mode_for_size (size, 0).exists (&mode))
7271 return boolean_false_node;
7272
86951993
AM
7273 mode_align = GET_MODE_ALIGNMENT (mode);
7274
310055e7
JW
7275 if (TREE_CODE (arg1) == INTEGER_CST)
7276 {
7277 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7278
7279 /* Either this argument is null, or it's a fake pointer encoding
7280 the alignment of the object. */
146ec50f 7281 val = least_bit_hwi (val);
310055e7
JW
7282 val *= BITS_PER_UNIT;
7283
7284 if (val == 0 || mode_align < val)
7285 type_align = mode_align;
7286 else
7287 type_align = val;
7288 }
86951993
AM
7289 else
7290 {
7291 tree ttype = TREE_TYPE (arg1);
7292
7293 /* This function is usually invoked and folded immediately by the front
7294 end before anything else has a chance to look at it. The pointer
7295 parameter at this point is usually cast to a void *, so check for that
7296 and look past the cast. */
7d9cf801
JJ
7297 if (CONVERT_EXPR_P (arg1)
7298 && POINTER_TYPE_P (ttype)
7299 && VOID_TYPE_P (TREE_TYPE (ttype))
7300 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
86951993
AM
7301 arg1 = TREE_OPERAND (arg1, 0);
7302
7303 ttype = TREE_TYPE (arg1);
7304 gcc_assert (POINTER_TYPE_P (ttype));
7305
7306 /* Get the underlying type of the object. */
7307 ttype = TREE_TYPE (ttype);
7308 type_align = TYPE_ALIGN (ttype);
7309 }
7310
026c3cfd 7311 /* If the object has smaller alignment, the lock free routines cannot
86951993
AM
7312 be used. */
7313 if (type_align < mode_align)
58d38fd2 7314 return boolean_false_node;
86951993
AM
7315
7316 /* Check if a compare_and_swap pattern exists for the mode which represents
7317 the required size. The pattern is not allowed to fail, so the existence
969a32ce
TR
7318 of the pattern indicates support is present. Also require that an
7319 atomic load exists for the required size. */
7320 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
58d38fd2 7321 return boolean_true_node;
86951993 7322 else
58d38fd2 7323 return boolean_false_node;
86951993
AM
7324}
7325
7326/* Return true if the parameters to call EXP represent an object which will
7327 always generate lock free instructions. The first argument represents the
7328 size of the object, and the second parameter is a pointer to the object
7329 itself. If NULL is passed for the object, then the result is based on
7330 typical alignment for an object of the specified size. Otherwise return
7331 false. */
7332
7333static rtx
7334expand_builtin_atomic_always_lock_free (tree exp)
7335{
7336 tree size;
7337 tree arg0 = CALL_EXPR_ARG (exp, 0);
7338 tree arg1 = CALL_EXPR_ARG (exp, 1);
7339
7340 if (TREE_CODE (arg0) != INTEGER_CST)
7341 {
a9c697b8 7342 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
86951993
AM
7343 return const0_rtx;
7344 }
7345
7346 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 7347 if (size == boolean_true_node)
86951993
AM
7348 return const1_rtx;
7349 return const0_rtx;
7350}
7351
7352/* Return a one or zero if it can be determined that object ARG1 of size ARG
7353 is lock free on this architecture. */
7354
7355static tree
7356fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7357{
7358 if (!flag_inline_atomics)
7359 return NULL_TREE;
7360
7361 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
7362 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7363 return boolean_true_node;
86951993
AM
7364
7365 return NULL_TREE;
7366}
7367
7368/* Return true if the parameters to call EXP represent an object which will
7369 always generate lock free instructions. The first argument represents the
7370 size of the object, and the second parameter is a pointer to the object
7371 itself. If NULL is passed for the object, then the result is based on
7372 typical alignment for an object of the specified size. Otherwise return
7373 NULL*/
7374
7375static rtx
7376expand_builtin_atomic_is_lock_free (tree exp)
7377{
7378 tree size;
7379 tree arg0 = CALL_EXPR_ARG (exp, 0);
7380 tree arg1 = CALL_EXPR_ARG (exp, 1);
7381
7382 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7383 {
a9c697b8 7384 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
86951993
AM
7385 return NULL_RTX;
7386 }
7387
7388 if (!flag_inline_atomics)
7389 return NULL_RTX;
7390
7391 /* If the value is known at compile time, return the RTX for it. */
7392 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 7393 if (size == boolean_true_node)
86951993
AM
7394 return const1_rtx;
7395
7396 return NULL_RTX;
7397}
7398
86951993
AM
7399/* Expand the __atomic_thread_fence intrinsic:
7400 void __atomic_thread_fence (enum memmodel)
7401 EXP is the CALL_EXPR. */
7402
7403static void
7404expand_builtin_atomic_thread_fence (tree exp)
7405{
c39169c8
RH
7406 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7407 expand_mem_thread_fence (model);
86951993
AM
7408}
7409
7410/* Expand the __atomic_signal_fence intrinsic:
7411 void __atomic_signal_fence (enum memmodel)
7412 EXP is the CALL_EXPR. */
7413
7414static void
7415expand_builtin_atomic_signal_fence (tree exp)
7416{
c39169c8
RH
7417 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7418 expand_mem_signal_fence (model);
48ae6c13
RH
7419}
7420
7421/* Expand the __sync_synchronize intrinsic. */
7422
7423static void
e0a8ecf2 7424expand_builtin_sync_synchronize (void)
48ae6c13 7425{
46b35980 7426 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
48ae6c13
RH
7427}
7428
f959607b
CLT
7429static rtx
7430expand_builtin_thread_pointer (tree exp, rtx target)
7431{
7432 enum insn_code icode;
7433 if (!validate_arglist (exp, VOID_TYPE))
7434 return const0_rtx;
7435 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7436 if (icode != CODE_FOR_nothing)
7437 {
99b1c316 7438 class expand_operand op;
b8a542c6
AP
7439 /* If the target is not sutitable then create a new target. */
7440 if (target == NULL_RTX
7441 || !REG_P (target)
7442 || GET_MODE (target) != Pmode)
f959607b
CLT
7443 target = gen_reg_rtx (Pmode);
7444 create_output_operand (&op, target, Pmode);
7445 expand_insn (icode, 1, &op);
7446 return target;
7447 }
a3f9f006 7448 error ("%<__builtin_thread_pointer%> is not supported on this target");
f959607b
CLT
7449 return const0_rtx;
7450}
7451
7452static void
7453expand_builtin_set_thread_pointer (tree exp)
7454{
7455 enum insn_code icode;
7456 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7457 return;
7458 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7459 if (icode != CODE_FOR_nothing)
7460 {
99b1c316 7461 class expand_operand op;
f959607b
CLT
7462 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7463 Pmode, EXPAND_NORMAL);
5440a1b0 7464 create_input_operand (&op, val, Pmode);
f959607b
CLT
7465 expand_insn (icode, 1, &op);
7466 return;
7467 }
a3f9f006 7468 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
f959607b
CLT
7469}
7470
28f4ec01 7471\f
862d0b35
DN
7472/* Emit code to restore the current value of stack. */
7473
7474static void
7475expand_stack_restore (tree var)
7476{
58f4cf2a
DM
7477 rtx_insn *prev;
7478 rtx sa = expand_normal (var);
862d0b35
DN
7479
7480 sa = convert_memory_address (Pmode, sa);
7481
7482 prev = get_last_insn ();
7483 emit_stack_restore (SAVE_BLOCK, sa);
d33606c3
EB
7484
7485 record_new_stack_level ();
7486
862d0b35
DN
7487 fixup_args_size_notes (prev, get_last_insn (), 0);
7488}
7489
862d0b35
DN
7490/* Emit code to save the current value of stack. */
7491
7492static rtx
7493expand_stack_save (void)
7494{
7495 rtx ret = NULL_RTX;
7496
862d0b35
DN
7497 emit_stack_save (SAVE_BLOCK, &ret);
7498 return ret;
7499}
7500
1f62d637
TV
7501/* Emit code to get the openacc gang, worker or vector id or size. */
7502
7503static rtx
7504expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7505{
7506 const char *name;
7507 rtx fallback_retval;
7508 rtx_insn *(*gen_fn) (rtx, rtx);
7509 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7510 {
7511 case BUILT_IN_GOACC_PARLEVEL_ID:
7512 name = "__builtin_goacc_parlevel_id";
7513 fallback_retval = const0_rtx;
7514 gen_fn = targetm.gen_oacc_dim_pos;
7515 break;
7516 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7517 name = "__builtin_goacc_parlevel_size";
7518 fallback_retval = const1_rtx;
7519 gen_fn = targetm.gen_oacc_dim_size;
7520 break;
7521 default:
7522 gcc_unreachable ();
7523 }
7524
7525 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7526 {
7527 error ("%qs only supported in OpenACC code", name);
7528 return const0_rtx;
7529 }
7530
7531 tree arg = CALL_EXPR_ARG (exp, 0);
7532 if (TREE_CODE (arg) != INTEGER_CST)
7533 {
7534 error ("non-constant argument 0 to %qs", name);
7535 return const0_rtx;
7536 }
7537
7538 int dim = TREE_INT_CST_LOW (arg);
7539 switch (dim)
7540 {
7541 case GOMP_DIM_GANG:
7542 case GOMP_DIM_WORKER:
7543 case GOMP_DIM_VECTOR:
7544 break;
7545 default:
7546 error ("illegal argument 0 to %qs", name);
7547 return const0_rtx;
7548 }
7549
7550 if (ignore)
7551 return target;
7552
39bc9f83
TV
7553 if (target == NULL_RTX)
7554 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7555
1f62d637
TV
7556 if (!targetm.have_oacc_dim_size ())
7557 {
7558 emit_move_insn (target, fallback_retval);
7559 return target;
7560 }
7561
7562 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7563 emit_insn (gen_fn (reg, GEN_INT (dim)));
7564 if (reg != target)
7565 emit_move_insn (target, reg);
7566
7567 return target;
7568}
41dbbb37 7569
10a0e2a9 7570/* Expand a string compare operation using a sequence of char comparison
b2272b13
QZ
7571 to get rid of the calling overhead, with result going to TARGET if
7572 that's convenient.
7573
7574 VAR_STR is the variable string source;
7575 CONST_STR is the constant string source;
7576 LENGTH is the number of chars to compare;
7577 CONST_STR_N indicates which source string is the constant string;
7578 IS_MEMCMP indicates whether it's a memcmp or strcmp.
10a0e2a9 7579
b2272b13
QZ
7580 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
7581
523a59ff
QZ
7582 target = (int) (unsigned char) var_str[0]
7583 - (int) (unsigned char) const_str[0];
b2272b13
QZ
7584 if (target != 0)
7585 goto ne_label;
7586 ...
523a59ff
QZ
7587 target = (int) (unsigned char) var_str[length - 2]
7588 - (int) (unsigned char) const_str[length - 2];
b2272b13
QZ
7589 if (target != 0)
7590 goto ne_label;
523a59ff
QZ
7591 target = (int) (unsigned char) var_str[length - 1]
7592 - (int) (unsigned char) const_str[length - 1];
b2272b13
QZ
7593 ne_label:
7594 */
7595
7596static rtx
10a0e2a9 7597inline_string_cmp (rtx target, tree var_str, const char *const_str,
b2272b13 7598 unsigned HOST_WIDE_INT length,
523a59ff 7599 int const_str_n, machine_mode mode)
b2272b13
QZ
7600{
7601 HOST_WIDE_INT offset = 0;
10a0e2a9 7602 rtx var_rtx_array
b2272b13
QZ
7603 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7604 rtx var_rtx = NULL_RTX;
10a0e2a9
JJ
7605 rtx const_rtx = NULL_RTX;
7606 rtx result = target ? target : gen_reg_rtx (mode);
7607 rtx_code_label *ne_label = gen_label_rtx ();
523a59ff 7608 tree unit_type_node = unsigned_char_type_node;
10a0e2a9
JJ
7609 scalar_int_mode unit_mode
7610 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
b2272b13
QZ
7611
7612 start_sequence ();
7613
7614 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7615 {
10a0e2a9 7616 var_rtx
b2272b13 7617 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
10a0e2a9 7618 const_rtx = c_readstr (const_str + offset, unit_mode);
b2272b13
QZ
7619 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7620 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
10a0e2a9 7621
523a59ff
QZ
7622 op0 = convert_modes (mode, unit_mode, op0, 1);
7623 op1 = convert_modes (mode, unit_mode, op1, 1);
10a0e2a9 7624 result = expand_simple_binop (mode, MINUS, op0, op1,
523a59ff 7625 result, 1, OPTAB_WIDEN);
10a0e2a9
JJ
7626 if (i < length - 1)
7627 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7628 mode, true, ne_label);
7629 offset += GET_MODE_SIZE (unit_mode);
b2272b13
QZ
7630 }
7631
7632 emit_label (ne_label);
7633 rtx_insn *insns = get_insns ();
7634 end_sequence ();
7635 emit_insn (insns);
7636
7637 return result;
7638}
7639
/* Inline expansion of a call to str(n)cmp or memcmp, with result going to
   TARGET if that's convenient.
   If the call is not inlined, return NULL_RTX.  */
static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  /* strncmp and memcmp both carry an explicit third (length) argument.  */
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower precision
     than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  /* The explicit length argument, present only for strncmp/memcmp.  */
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* c_getstr returns NULL when an argument is not a constant string;
     on success it also sets the size of the string constant.  */
  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither string is a constant string, the call does not qualify.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp/memcmp, if the length is not a constant, the call does
     not qualify.  */
  if (is_ncmp)
    {
      if (!tree_fits_uhwi_p (len3_tree))
	return NULL_RTX;
      else
	len3 = tree_to_uhwi (len3_tree);
    }

  /* Convert constant sizes to string lengths that include the
     terminating NUL.  A length of 0 below thus means "not constant".  */
  if (src_str1 != NULL)
    len1 = strnlen (src_str1, len1) + 1;

  if (src_str2 != NULL)
    len2 = strnlen (src_str2, len2) + 1;

  /* Select which argument drives the expansion: the constant one, or,
     when both are constant, the shorter of the two.  */
  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  /* Never compare past the explicit strncmp/memcmp bound.  */
  if (is_ncmp && len3 < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     do nothing.  */
  if (length > (unsigned HOST_WIDE_INT)
	       param_builtin_string_cmp_inline_length)
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion of the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? src_str1 : src_str2, length,
			    const_str_n, mode);
}
7725
425fc685
RE
7726/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7727 represents the size of the first argument to that call, or VOIDmode
7728 if the argument is a pointer. IGNORE will be true if the result
7729 isn't used. */
7730static rtx
7731expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7732 bool ignore)
7733{
7734 rtx val, failsafe;
7735 unsigned nargs = call_expr_nargs (exp);
7736
7737 tree arg0 = CALL_EXPR_ARG (exp, 0);
7738
7739 if (mode == VOIDmode)
7740 {
7741 mode = TYPE_MODE (TREE_TYPE (arg0));
7742 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7743 }
7744
7745 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7746
7747 /* An optional second argument can be used as a failsafe value on
7748 some machines. If it isn't present, then the failsafe value is
7749 assumed to be 0. */
7750 if (nargs > 1)
7751 {
7752 tree arg1 = CALL_EXPR_ARG (exp, 1);
7753 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7754 }
7755 else
7756 failsafe = const0_rtx;
7757
7758 /* If the result isn't used, the behavior is undefined. It would be
7759 nice to emit a warning here, but path splitting means this might
7760 happen with legitimate code. So simply drop the builtin
7761 expansion in that case; we've handled any side-effects above. */
7762 if (ignore)
7763 return const0_rtx;
7764
7765 /* If we don't have a suitable target, create one to hold the result. */
7766 if (target == NULL || GET_MODE (target) != mode)
7767 target = gen_reg_rtx (mode);
7768
7769 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7770 val = convert_modes (mode, VOIDmode, val, false);
7771
7772 return targetm.speculation_safe_value (mode, target, val, failsafe);
7773}
7774
28f4ec01
BS
7775/* Expand an expression EXP that calls a built-in function,
7776 with result going to TARGET if that's convenient
7777 (and in mode MODE if that's convenient).
7778 SUBTARGET may be used as the target for computing one of EXP's operands.
7779 IGNORE is nonzero if the value is to be ignored. */
7780
7781rtx
ef4bddc2 7782expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 7783 int ignore)
28f4ec01 7784{
2f503025 7785 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 7786 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 7787 int flags;
28f4ec01 7788
d51151b2
JJ
7789 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7790 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7791
bdea98ca
MO
7792 /* When ASan is enabled, we don't want to expand some memory/string
7793 builtins and rely on libsanitizer's hooks. This allows us to avoid
7794 redundant checks and be sure, that possible overflow will be detected
7795 by ASan. */
7796
4d732405 7797 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
bdea98ca
MO
7798 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7799 return expand_call (exp, target, ignore);
7800
28f4ec01
BS
7801 /* When not optimizing, generate calls to library functions for a certain
7802 set of builtins. */
d25225de 7803 if (!optimize
48ae6c13 7804 && !called_as_built_in (fndecl)
63bf9a90
JH
7805 && fcode != BUILT_IN_FORK
7806 && fcode != BUILT_IN_EXECL
7807 && fcode != BUILT_IN_EXECV
7808 && fcode != BUILT_IN_EXECLP
7809 && fcode != BUILT_IN_EXECLE
7810 && fcode != BUILT_IN_EXECVP
7811 && fcode != BUILT_IN_EXECVE
9e878cf1 7812 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 7813 && fcode != BUILT_IN_FREE)
d25225de 7814 return expand_call (exp, target, ignore);
28f4ec01 7815
0a45ec5c
RS
7816 /* The built-in function expanders test for target == const0_rtx
7817 to determine whether the function's result will be ignored. */
7818 if (ignore)
7819 target = const0_rtx;
7820
7821 /* If the result of a pure or const built-in function is ignored, and
7822 none of its arguments are volatile, we can avoid expanding the
7823 built-in call and just evaluate the arguments for side-effects. */
7824 if (target == const0_rtx
9e3920e9
JJ
7825 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7826 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
7827 {
7828 bool volatilep = false;
7829 tree arg;
5039610b 7830 call_expr_arg_iterator iter;
0a45ec5c 7831
5039610b
SL
7832 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7833 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
7834 {
7835 volatilep = true;
7836 break;
7837 }
7838
7839 if (! volatilep)
7840 {
5039610b
SL
7841 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7842 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
7843 return const0_rtx;
7844 }
7845 }
7846
28f4ec01
BS
7847 switch (fcode)
7848 {
ea6a6627 7849 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 7850 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
7851 case BUILT_IN_FABSD32:
7852 case BUILT_IN_FABSD64:
7853 case BUILT_IN_FABSD128:
5039610b 7854 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 7855 if (target)
c22cacf3 7856 return target;
075ec276
RS
7857 break;
7858
ea6a6627 7859 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 7860 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 7861 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
7862 if (target)
7863 return target;
7864 break;
7865
5906d013
EC
7866 /* Just do a normal library call if we were unable to fold
7867 the values. */
ea6a6627 7868 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 7869 break;
28f4ec01 7870
1b1562a5 7871 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 7872 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
7873 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7874 if (target)
7875 return target;
7876 break;
7877
eaee4464
UB
7878 CASE_FLT_FN (BUILT_IN_ILOGB):
7879 if (! flag_unsafe_math_optimizations)
7880 break;
903c723b
TC
7881 gcc_fallthrough ();
7882 CASE_FLT_FN (BUILT_IN_ISINF):
7883 CASE_FLT_FN (BUILT_IN_FINITE):
7884 case BUILT_IN_ISFINITE:
7885 case BUILT_IN_ISNORMAL:
4359dc2a 7886 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
7887 if (target)
7888 return target;
7889 break;
7890
6c32ee74 7891 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7892 CASE_FLT_FN (BUILT_IN_LCEIL):
7893 CASE_FLT_FN (BUILT_IN_LLCEIL):
7894 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 7895 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 7896 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 7897 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
7898 if (target)
7899 return target;
7900 break;
7901
6c32ee74 7902 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
7903 CASE_FLT_FN (BUILT_IN_LRINT):
7904 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 7905 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
7906 CASE_FLT_FN (BUILT_IN_LROUND):
7907 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 7908 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
7909 if (target)
7910 return target;
7911 break;
7912
ea6a6627 7913 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 7914 target = expand_builtin_powi (exp, target);
17684d46
RG
7915 if (target)
7916 return target;
7917 break;
7918
75c7c595 7919 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 7920 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
7921 gcc_assert (target);
7922 return target;
7923
ea6a6627
VR
7924 CASE_FLT_FN (BUILT_IN_SIN):
7925 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
7926 if (! flag_unsafe_math_optimizations)
7927 break;
7928 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7929 if (target)
7930 return target;
7931 break;
7932
403e54f0
RG
7933 CASE_FLT_FN (BUILT_IN_SINCOS):
7934 if (! flag_unsafe_math_optimizations)
7935 break;
7936 target = expand_builtin_sincos (exp);
7937 if (target)
7938 return target;
7939 break;
7940
28f4ec01
BS
7941 case BUILT_IN_APPLY_ARGS:
7942 return expand_builtin_apply_args ();
7943
7944 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7945 FUNCTION with a copy of the parameters described by
7946 ARGUMENTS, and ARGSIZE. It returns a block of memory
7947 allocated on the stack into which is stored all the registers
7948 that might possibly be used for returning the result of a
7949 function. ARGUMENTS is the value returned by
7950 __builtin_apply_args. ARGSIZE is the number of bytes of
7951 arguments that must be copied. ??? How should this value be
7952 computed? We'll also need a safe worst case value for varargs
7953 functions. */
7954 case BUILT_IN_APPLY:
5039610b 7955 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 7956 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 7957 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 7958 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
7959 return const0_rtx;
7960 else
7961 {
28f4ec01
BS
7962 rtx ops[3];
7963
5039610b
SL
7964 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7965 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7966 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
7967
7968 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7969 }
7970
7971 /* __builtin_return (RESULT) causes the function to return the
7972 value described by RESULT. RESULT is address of the block of
7973 memory returned by __builtin_apply. */
7974 case BUILT_IN_RETURN:
5039610b
SL
7975 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7976 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
7977 return const0_rtx;
7978
7979 case BUILT_IN_SAVEREGS:
d3707adb 7980 return expand_builtin_saveregs ();
28f4ec01 7981
6ef5231b
JJ
7982 case BUILT_IN_VA_ARG_PACK:
7983 /* All valid uses of __builtin_va_arg_pack () are removed during
7984 inlining. */
c94ed7a1 7985 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
7986 return const0_rtx;
7987
ab0e176c
JJ
7988 case BUILT_IN_VA_ARG_PACK_LEN:
7989 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7990 inlining. */
c94ed7a1 7991 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
7992 return const0_rtx;
7993
28f4ec01
BS
7994 /* Return the address of the first anonymous stack arg. */
7995 case BUILT_IN_NEXT_ARG:
5039610b 7996 if (fold_builtin_next_arg (exp, false))
c22cacf3 7997 return const0_rtx;
8870e212 7998 return expand_builtin_next_arg ();
28f4ec01 7999
677feb77
DD
8000 case BUILT_IN_CLEAR_CACHE:
8001 target = expand_builtin___clear_cache (exp);
8002 if (target)
8003 return target;
8004 break;
8005
28f4ec01 8006 case BUILT_IN_CLASSIFY_TYPE:
5039610b 8007 return expand_builtin_classify_type (exp);
28f4ec01
BS
8008
8009 case BUILT_IN_CONSTANT_P:
6de9cd9a 8010 return const0_rtx;
28f4ec01
BS
8011
8012 case BUILT_IN_FRAME_ADDRESS:
8013 case BUILT_IN_RETURN_ADDRESS:
5039610b 8014 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
8015
8016 /* Returns the address of the area where the structure is returned.
8017 0 otherwise. */
8018 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 8019 if (call_expr_nargs (exp) != 0
ca7fd9cd 8020 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 8021 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 8022 return const0_rtx;
28f4ec01 8023 else
ca7fd9cd 8024 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 8025
9e878cf1 8026 CASE_BUILT_IN_ALLOCA:
b7e52782 8027 target = expand_builtin_alloca (exp);
28f4ec01
BS
8028 if (target)
8029 return target;
8030 break;
8031
e3174bdf
MO
8032 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8033 return expand_asan_emit_allocas_unpoison (exp);
8034
6de9cd9a
DN
8035 case BUILT_IN_STACK_SAVE:
8036 return expand_stack_save ();
8037
8038 case BUILT_IN_STACK_RESTORE:
5039610b 8039 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
8040 return const0_rtx;
8041
ac868f29 8042 case BUILT_IN_BSWAP16:
167fa32c
EC
8043 case BUILT_IN_BSWAP32:
8044 case BUILT_IN_BSWAP64:
fe7ebef7 8045 case BUILT_IN_BSWAP128:
ac868f29 8046 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
8047 if (target)
8048 return target;
8049 break;
8050
ea6a6627 8051 CASE_INT_FN (BUILT_IN_FFS):
5039610b 8052 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8053 subtarget, ffs_optab);
2928cd7a
RH
8054 if (target)
8055 return target;
8056 break;
8057
ea6a6627 8058 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 8059 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8060 subtarget, clz_optab);
2928cd7a
RH
8061 if (target)
8062 return target;
8063 break;
8064
ea6a6627 8065 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 8066 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8067 subtarget, ctz_optab);
2928cd7a
RH
8068 if (target)
8069 return target;
8070 break;
8071
3801c801 8072 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
8073 target = expand_builtin_unop (target_mode, exp, target,
8074 subtarget, clrsb_optab);
8075 if (target)
8076 return target;
8077 break;
8078
ea6a6627 8079 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 8080 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8081 subtarget, popcount_optab);
2928cd7a
RH
8082 if (target)
8083 return target;
8084 break;
8085
ea6a6627 8086 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 8087 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 8088 subtarget, parity_optab);
28f4ec01
BS
8089 if (target)
8090 return target;
8091 break;
8092
8093 case BUILT_IN_STRLEN:
5039610b 8094 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
8095 if (target)
8096 return target;
8097 break;
8098
781ff3d8
MS
8099 case BUILT_IN_STRNLEN:
8100 target = expand_builtin_strnlen (exp, target, target_mode);
8101 if (target)
8102 return target;
8103 break;
8104
ee92e7ba 8105 case BUILT_IN_STRCAT:
b5338fb3 8106 target = expand_builtin_strcat (exp);
ee92e7ba
MS
8107 if (target)
8108 return target;
8109 break;
8110
b5338fb3
MS
8111 case BUILT_IN_GETTEXT:
8112 case BUILT_IN_PUTS:
8113 case BUILT_IN_PUTS_UNLOCKED:
8114 case BUILT_IN_STRDUP:
8115 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8116 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8117 break;
8118
8119 case BUILT_IN_INDEX:
8120 case BUILT_IN_RINDEX:
8121 case BUILT_IN_STRCHR:
8122 case BUILT_IN_STRRCHR:
8123 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8124 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8125 break;
8126
8127 case BUILT_IN_FPUTS:
8128 case BUILT_IN_FPUTS_UNLOCKED:
8129 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8130 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8131 break;
8132
8133 case BUILT_IN_STRNDUP:
8134 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8135 check_nul_terminated_array (exp,
8136 CALL_EXPR_ARG (exp, 0),
8137 CALL_EXPR_ARG (exp, 1));
8138 break;
8139
8140 case BUILT_IN_STRCASECMP:
8141 case BUILT_IN_STRSTR:
8142 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8143 {
8144 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8145 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8146 }
8147 break;
8148
28f4ec01 8149 case BUILT_IN_STRCPY:
44e10129 8150 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
8151 if (target)
8152 return target;
8153 break;
8d51ecf8 8154
ee92e7ba
MS
8155 case BUILT_IN_STRNCAT:
8156 target = expand_builtin_strncat (exp, target);
8157 if (target)
8158 return target;
8159 break;
8160
da9e9f08 8161 case BUILT_IN_STRNCPY:
44e10129 8162 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
8163 if (target)
8164 return target;
8165 break;
8d51ecf8 8166
9cb65f92 8167 case BUILT_IN_STPCPY:
609ae0e2 8168 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
8169 if (target)
8170 return target;
8171 break;
8172
e50d56a5
MS
8173 case BUILT_IN_STPNCPY:
8174 target = expand_builtin_stpncpy (exp, target);
8175 if (target)
8176 return target;
8177 break;
8178
d9c5a8b9
MS
8179 case BUILT_IN_MEMCHR:
8180 target = expand_builtin_memchr (exp, target);
8181 if (target)
8182 return target;
8183 break;
8184
28f4ec01 8185 case BUILT_IN_MEMCPY:
44e10129 8186 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
8187 if (target)
8188 return target;
8189 break;
8190
e50d56a5
MS
8191 case BUILT_IN_MEMMOVE:
8192 target = expand_builtin_memmove (exp, target);
8193 if (target)
8194 return target;
8195 break;
8196
9cb65f92 8197 case BUILT_IN_MEMPCPY:
671a00ee 8198 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
8199 if (target)
8200 return target;
8201 break;
8202
8203 case BUILT_IN_MEMSET:
5039610b 8204 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
8205 if (target)
8206 return target;
8207 break;
8208
e3a709be 8209 case BUILT_IN_BZERO:
8148fe65 8210 target = expand_builtin_bzero (exp);
e3a709be
KG
8211 if (target)
8212 return target;
8213 break;
8214
10a0e2a9 8215 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
8216 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
8217 when changing it to a strcmp call. */
8218 case BUILT_IN_STRCMP_EQ:
8219 target = expand_builtin_memcmp (exp, target, true);
8220 if (target)
8221 return target;
8222
8223 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 8224 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8225 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8226
8227 /* Delete the last parameter. */
8228 unsigned int i;
8229 vec<tree, va_gc> *arg_vec;
8230 vec_alloc (arg_vec, 2);
8231 for (i = 0; i < 2; i++)
8232 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8233 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8234 /* FALLTHROUGH */
8235
28f4ec01 8236 case BUILT_IN_STRCMP:
44e10129 8237 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
8238 if (target)
8239 return target;
8240 break;
8241
8b0b334a
QZ
8242 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8243 back to a BUILT_IN_STRNCMP. */
8244 case BUILT_IN_STRNCMP_EQ:
8245 target = expand_builtin_memcmp (exp, target, true);
8246 if (target)
8247 return target;
8248
8249 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 8250 TREE_OPERAND (exp, 1)
8b0b334a
QZ
8251 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8252 /* FALLTHROUGH */
8253
da9e9f08
KG
8254 case BUILT_IN_STRNCMP:
8255 target = expand_builtin_strncmp (exp, target, mode);
8256 if (target)
8257 return target;
8258 break;
8259
4b2a62db 8260 case BUILT_IN_BCMP:
28f4ec01 8261 case BUILT_IN_MEMCMP:
36b85e43
BS
8262 case BUILT_IN_MEMCMP_EQ:
8263 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
8264 if (target)
8265 return target;
36b85e43
BS
8266 if (fcode == BUILT_IN_MEMCMP_EQ)
8267 {
8268 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8269 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8270 }
28f4ec01 8271 break;
28f4ec01
BS
8272
8273 case BUILT_IN_SETJMP:
903c723b 8274 /* This should have been lowered to the builtins below. */
4f6c2131
EB
8275 gcc_unreachable ();
8276
8277 case BUILT_IN_SETJMP_SETUP:
8278 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8279 and the receiver label. */
5039610b 8280 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 8281 {
5039610b 8282 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 8283 VOIDmode, EXPAND_NORMAL);
5039610b 8284 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 8285 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8286
8287 /* This is copied from the handling of non-local gotos. */
8288 expand_builtin_setjmp_setup (buf_addr, label_r);
8289 nonlocal_goto_handler_labels
b5241a5a 8290 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
8291 nonlocal_goto_handler_labels);
8292 /* ??? Do not let expand_label treat us as such since we would
8293 not want to be both on the list of non-local labels and on
8294 the list of forced labels. */
8295 FORCED_LABEL (label) = 0;
8296 return const0_rtx;
8297 }
8298 break;
8299
4f6c2131
EB
8300 case BUILT_IN_SETJMP_RECEIVER:
8301 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 8302 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 8303 {
5039610b 8304 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 8305 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
8306
8307 expand_builtin_setjmp_receiver (label_r);
8308 return const0_rtx;
8309 }
250d07b6 8310 break;
28f4ec01
BS
8311
8312 /* __builtin_longjmp is passed a pointer to an array of five words.
8313 It's similar to the C library longjmp function but works with
8314 __builtin_setjmp above. */
8315 case BUILT_IN_LONGJMP:
5039610b 8316 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 8317 {
5039610b 8318 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 8319 VOIDmode, EXPAND_NORMAL);
5039610b 8320 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
8321
8322 if (value != const1_rtx)
8323 {
9e637a26 8324 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
8325 return const0_rtx;
8326 }
8327
8328 expand_builtin_longjmp (buf_addr, value);
8329 return const0_rtx;
8330 }
4f6c2131 8331 break;
28f4ec01 8332
6de9cd9a 8333 case BUILT_IN_NONLOCAL_GOTO:
5039610b 8334 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
8335 if (target)
8336 return target;
8337 break;
8338
2b92e7f5
RK
8339 /* This updates the setjmp buffer that is its argument with the value
8340 of the current stack pointer. */
8341 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 8342 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
8343 {
8344 rtx buf_addr
5039610b 8345 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
8346
8347 expand_builtin_update_setjmp_buf (buf_addr);
8348 return const0_rtx;
8349 }
8350 break;
8351
28f4ec01 8352 case BUILT_IN_TRAP:
9602f5a0 8353 expand_builtin_trap ();
28f4ec01
BS
8354 return const0_rtx;
8355
468059bc
DD
8356 case BUILT_IN_UNREACHABLE:
8357 expand_builtin_unreachable ();
8358 return const0_rtx;
8359
ea6a6627 8360 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
8361 case BUILT_IN_SIGNBITD32:
8362 case BUILT_IN_SIGNBITD64:
8363 case BUILT_IN_SIGNBITD128:
ef79730c
RS
8364 target = expand_builtin_signbit (exp, target);
8365 if (target)
8366 return target;
8367 break;
8368
28f4ec01
BS
8369 /* Various hooks for the DWARF 2 __throw routine. */
8370 case BUILT_IN_UNWIND_INIT:
8371 expand_builtin_unwind_init ();
8372 return const0_rtx;
8373 case BUILT_IN_DWARF_CFA:
8374 return virtual_cfa_rtx;
8375#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
8376 case BUILT_IN_DWARF_SP_COLUMN:
8377 return expand_builtin_dwarf_sp_column ();
d9d5c9de 8378 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 8379 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 8380 return const0_rtx;
28f4ec01
BS
8381#endif
8382 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 8383 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8384 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 8385 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 8386 case BUILT_IN_EH_RETURN:
5039610b
SL
8387 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8388 CALL_EXPR_ARG (exp, 1));
28f4ec01 8389 return const0_rtx;
52a11cbf 8390 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 8391 return expand_builtin_eh_return_data_regno (exp);
c76362b4 8392 case BUILT_IN_EXTEND_POINTER:
5039610b 8393 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
8394 case BUILT_IN_EH_POINTER:
8395 return expand_builtin_eh_pointer (exp);
8396 case BUILT_IN_EH_FILTER:
8397 return expand_builtin_eh_filter (exp);
8398 case BUILT_IN_EH_COPY_VALUES:
8399 return expand_builtin_eh_copy_values (exp);
c76362b4 8400
6c535c69 8401 case BUILT_IN_VA_START:
5039610b 8402 return expand_builtin_va_start (exp);
d3707adb 8403 case BUILT_IN_VA_END:
5039610b 8404 return expand_builtin_va_end (exp);
d3707adb 8405 case BUILT_IN_VA_COPY:
5039610b 8406 return expand_builtin_va_copy (exp);
994a57cd 8407 case BUILT_IN_EXPECT:
5039610b 8408 return expand_builtin_expect (exp, target);
1e9168b2
ML
8409 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8410 return expand_builtin_expect_with_probability (exp, target);
45d439ac
JJ
8411 case BUILT_IN_ASSUME_ALIGNED:
8412 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 8413 case BUILT_IN_PREFETCH:
5039610b 8414 expand_builtin_prefetch (exp);
a9ccbb60
JJ
8415 return const0_rtx;
8416
6de9cd9a 8417 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
8418 return expand_builtin_init_trampoline (exp, true);
8419 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8420 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 8421 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 8422 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 8423
4c640e26
EB
8424 case BUILT_IN_INIT_DESCRIPTOR:
8425 return expand_builtin_init_descriptor (exp);
8426 case BUILT_IN_ADJUST_DESCRIPTOR:
8427 return expand_builtin_adjust_descriptor (exp);
8428
d1c38823
ZD
8429 case BUILT_IN_FORK:
8430 case BUILT_IN_EXECL:
8431 case BUILT_IN_EXECV:
8432 case BUILT_IN_EXECLP:
8433 case BUILT_IN_EXECLE:
8434 case BUILT_IN_EXECVP:
8435 case BUILT_IN_EXECVE:
5039610b 8436 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
8437 if (target)
8438 return target;
8439 break;
28f4ec01 8440
e0a8ecf2
AM
8441 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8442 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8443 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8444 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8445 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 8447 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
8448 if (target)
8449 return target;
8450 break;
8451
e0a8ecf2
AM
8452 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8453 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8454 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8455 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8456 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 8458 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
8459 if (target)
8460 return target;
8461 break;
8462
e0a8ecf2
AM
8463 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8464 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8465 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8466 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8467 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 8469 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
8470 if (target)
8471 return target;
8472 break;
8473
e0a8ecf2
AM
8474 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8475 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8476 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8477 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8478 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 8480 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
8481 if (target)
8482 return target;
8483 break;
8484
e0a8ecf2
AM
8485 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8486 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8487 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8488 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8489 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8490 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 8491 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
8492 if (target)
8493 return target;
8494 break;
8495
e0a8ecf2
AM
8496 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8497 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8498 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8499 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8500 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8501 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 8502 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
8503 if (target)
8504 return target;
8505 break;
8506
e0a8ecf2
AM
8507 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8508 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8509 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8510 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8511 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 8513 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
8514 if (target)
8515 return target;
8516 break;
8517
e0a8ecf2
AM
8518 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8519 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8520 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8521 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8522 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 8524 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
8525 if (target)
8526 return target;
8527 break;
8528
e0a8ecf2
AM
8529 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8530 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8531 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8532 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8533 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8534 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 8535 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
8536 if (target)
8537 return target;
8538 break;
8539
e0a8ecf2
AM
8540 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8541 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8542 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8543 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8544 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 8546 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
8547 if (target)
8548 return target;
8549 break;
8550
e0a8ecf2
AM
8551 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8552 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8553 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8554 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8555 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 8557 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
8558 if (target)
8559 return target;
8560 break;
8561
e0a8ecf2
AM
8562 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8563 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8564 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8565 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8566 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 8568 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
8569 if (target)
8570 return target;
8571 break;
8572
e0a8ecf2
AM
8573 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8574 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
8578 if (mode == VOIDmode)
8579 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
8580 if (!target || !register_operand (target, mode))
8581 target = gen_reg_rtx (mode);
02ee605c 8582
e0a8ecf2
AM
8583 mode = get_builtin_sync_mode
8584 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 8585 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
8586 if (target)
8587 return target;
8588 break;
8589
e0a8ecf2
AM
8590 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8591 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8595 mode = get_builtin_sync_mode
8596 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 8597 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
8598 if (target)
8599 return target;
8600 break;
8601
e0a8ecf2
AM
8602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8607 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8608 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
8609 if (target)
8610 return target;
8611 break;
8612
e0a8ecf2
AM
8613 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8614 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8615 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8616 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8617 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8618 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8619 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
8620 return const0_rtx;
8621
e0a8ecf2
AM
8622 case BUILT_IN_SYNC_SYNCHRONIZE:
8623 expand_builtin_sync_synchronize ();
48ae6c13
RH
8624 return const0_rtx;
8625
86951993
AM
8626 case BUILT_IN_ATOMIC_EXCHANGE_1:
8627 case BUILT_IN_ATOMIC_EXCHANGE_2:
8628 case BUILT_IN_ATOMIC_EXCHANGE_4:
8629 case BUILT_IN_ATOMIC_EXCHANGE_8:
8630 case BUILT_IN_ATOMIC_EXCHANGE_16:
8631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8632 target = expand_builtin_atomic_exchange (mode, exp, target);
8633 if (target)
8634 return target;
8635 break;
8636
8637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
8642 {
8643 unsigned int nargs, z;
9771b263 8644 vec<tree, va_gc> *vec;
e351ae85
AM
8645
8646 mode =
8647 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8648 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8649 if (target)
8650 return target;
8651
8652 /* If this is turned into an external library call, the weak parameter
8653 must be dropped to match the expected parameter list. */
8654 nargs = call_expr_nargs (exp);
9771b263 8655 vec_alloc (vec, nargs - 1);
e351ae85 8656 for (z = 0; z < 3; z++)
9771b263 8657 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8658 /* Skip the boolean weak parameter. */
8659 for (z = 4; z < 6; z++)
9771b263 8660 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
8661 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8662 break;
8663 }
86951993
AM
8664
8665 case BUILT_IN_ATOMIC_LOAD_1:
8666 case BUILT_IN_ATOMIC_LOAD_2:
8667 case BUILT_IN_ATOMIC_LOAD_4:
8668 case BUILT_IN_ATOMIC_LOAD_8:
8669 case BUILT_IN_ATOMIC_LOAD_16:
8670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8671 target = expand_builtin_atomic_load (mode, exp, target);
8672 if (target)
8673 return target;
8674 break;
8675
8676 case BUILT_IN_ATOMIC_STORE_1:
8677 case BUILT_IN_ATOMIC_STORE_2:
8678 case BUILT_IN_ATOMIC_STORE_4:
8679 case BUILT_IN_ATOMIC_STORE_8:
8680 case BUILT_IN_ATOMIC_STORE_16:
8681 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8682 target = expand_builtin_atomic_store (mode, exp);
8683 if (target)
8684 return const0_rtx;
8685 break;
8686
8687 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8688 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8689 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8690 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8691 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8692 {
8693 enum built_in_function lib;
8694 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8695 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8696 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8697 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8698 ignore, lib);
8699 if (target)
8700 return target;
8701 break;
8702 }
8703 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8704 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8705 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8706 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8707 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8708 {
8709 enum built_in_function lib;
8710 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8711 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8712 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8713 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8714 ignore, lib);
8715 if (target)
8716 return target;
8717 break;
8718 }
8719 case BUILT_IN_ATOMIC_AND_FETCH_1:
8720 case BUILT_IN_ATOMIC_AND_FETCH_2:
8721 case BUILT_IN_ATOMIC_AND_FETCH_4:
8722 case BUILT_IN_ATOMIC_AND_FETCH_8:
8723 case BUILT_IN_ATOMIC_AND_FETCH_16:
8724 {
8725 enum built_in_function lib;
8726 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8727 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8728 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8729 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8730 ignore, lib);
8731 if (target)
8732 return target;
8733 break;
8734 }
8735 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8736 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8737 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8738 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8739 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8740 {
8741 enum built_in_function lib;
8742 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8743 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8744 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8745 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8746 ignore, lib);
8747 if (target)
8748 return target;
8749 break;
8750 }
8751 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8752 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8753 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8754 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8755 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8756 {
8757 enum built_in_function lib;
8758 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8759 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8760 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8761 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8762 ignore, lib);
8763 if (target)
8764 return target;
8765 break;
8766 }
8767 case BUILT_IN_ATOMIC_OR_FETCH_1:
8768 case BUILT_IN_ATOMIC_OR_FETCH_2:
8769 case BUILT_IN_ATOMIC_OR_FETCH_4:
8770 case BUILT_IN_ATOMIC_OR_FETCH_8:
8771 case BUILT_IN_ATOMIC_OR_FETCH_16:
8772 {
8773 enum built_in_function lib;
8774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8775 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8776 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8777 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8778 ignore, lib);
8779 if (target)
8780 return target;
8781 break;
8782 }
8783 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8784 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8785 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8786 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8787 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8789 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8790 ignore, BUILT_IN_NONE);
8791 if (target)
8792 return target;
8793 break;
8794
8795 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8796 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8797 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8798 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8799 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8801 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8802 ignore, BUILT_IN_NONE);
8803 if (target)
8804 return target;
8805 break;
8806
8807 case BUILT_IN_ATOMIC_FETCH_AND_1:
8808 case BUILT_IN_ATOMIC_FETCH_AND_2:
8809 case BUILT_IN_ATOMIC_FETCH_AND_4:
8810 case BUILT_IN_ATOMIC_FETCH_AND_8:
8811 case BUILT_IN_ATOMIC_FETCH_AND_16:
8812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8813 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8814 ignore, BUILT_IN_NONE);
8815 if (target)
8816 return target;
8817 break;
8818
8819 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8820 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8821 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8822 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8823 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8825 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8826 ignore, BUILT_IN_NONE);
8827 if (target)
8828 return target;
8829 break;
8830
8831 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8832 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8833 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8834 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8835 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8836 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8837 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8838 ignore, BUILT_IN_NONE);
8839 if (target)
8840 return target;
8841 break;
8842
8843 case BUILT_IN_ATOMIC_FETCH_OR_1:
8844 case BUILT_IN_ATOMIC_FETCH_OR_2:
8845 case BUILT_IN_ATOMIC_FETCH_OR_4:
8846 case BUILT_IN_ATOMIC_FETCH_OR_8:
8847 case BUILT_IN_ATOMIC_FETCH_OR_16:
8848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8849 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8850 ignore, BUILT_IN_NONE);
8851 if (target)
8852 return target;
8853 break;
d660c35e
AM
8854
8855 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 8856 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
8857
8858 case BUILT_IN_ATOMIC_CLEAR:
8859 return expand_builtin_atomic_clear (exp);
86951993
AM
8860
8861 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8862 return expand_builtin_atomic_always_lock_free (exp);
8863
8864 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8865 target = expand_builtin_atomic_is_lock_free (exp);
8866 if (target)
8867 return target;
8868 break;
8869
8870 case BUILT_IN_ATOMIC_THREAD_FENCE:
8871 expand_builtin_atomic_thread_fence (exp);
8872 return const0_rtx;
8873
8874 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8875 expand_builtin_atomic_signal_fence (exp);
8876 return const0_rtx;
8877
10a0d495
JJ
8878 case BUILT_IN_OBJECT_SIZE:
8879 return expand_builtin_object_size (exp);
8880
8881 case BUILT_IN_MEMCPY_CHK:
8882 case BUILT_IN_MEMPCPY_CHK:
8883 case BUILT_IN_MEMMOVE_CHK:
8884 case BUILT_IN_MEMSET_CHK:
8885 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8886 if (target)
8887 return target;
8888 break;
8889
8890 case BUILT_IN_STRCPY_CHK:
8891 case BUILT_IN_STPCPY_CHK:
8892 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 8893 case BUILT_IN_STPNCPY_CHK:
10a0d495 8894 case BUILT_IN_STRCAT_CHK:
1c2fc017 8895 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
8896 case BUILT_IN_SNPRINTF_CHK:
8897 case BUILT_IN_VSNPRINTF_CHK:
8898 maybe_emit_chk_warning (exp, fcode);
8899 break;
8900
8901 case BUILT_IN_SPRINTF_CHK:
8902 case BUILT_IN_VSPRINTF_CHK:
8903 maybe_emit_sprintf_chk_warning (exp, fcode);
8904 break;
8905
f9555f40 8906 case BUILT_IN_FREE:
a3a704a4
MH
8907 if (warn_free_nonheap_object)
8908 maybe_emit_free_warning (exp);
f9555f40
JJ
8909 break;
8910
f959607b
CLT
8911 case BUILT_IN_THREAD_POINTER:
8912 return expand_builtin_thread_pointer (exp, target);
8913
8914 case BUILT_IN_SET_THREAD_POINTER:
8915 expand_builtin_set_thread_pointer (exp);
8916 return const0_rtx;
8917
41dbbb37 8918 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
8919 /* Do library call, if we failed to expand the builtin when
8920 folding. */
41dbbb37
TS
8921 break;
8922
1f62d637
TV
8923 case BUILT_IN_GOACC_PARLEVEL_ID:
8924 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8925 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8926
425fc685
RE
8927 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8928 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8929
8930 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8931 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8932 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8933 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8934 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8935 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8936 return expand_speculation_safe_value (mode, exp, target, ignore);
8937
e62f4abc 8938 default: /* just do library call, if unknown builtin */
84b8b0e0 8939 break;
28f4ec01
BS
8940 }
8941
8942 /* The switch statement above can drop through to cause the function
8943 to be called normally. */
8944 return expand_call (exp, target, ignore);
8945}
b0b3afb2 8946
4977bab6 8947/* Determine whether a tree node represents a call to a built-in
feda1845
RS
8948 function. If the tree T is a call to a built-in function with
8949 the right number of arguments of the appropriate types, return
8950 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8951 Otherwise the return value is END_BUILTINS. */
4682ae04 8952
4977bab6 8953enum built_in_function
fa233e34 8954builtin_mathfn_code (const_tree t)
4977bab6 8955{
fa233e34
KG
8956 const_tree fndecl, arg, parmlist;
8957 const_tree argtype, parmtype;
8958 const_call_expr_arg_iterator iter;
4977bab6 8959
5f92d109 8960 if (TREE_CODE (t) != CALL_EXPR)
4977bab6
ZW
8961 return END_BUILTINS;
8962
2f503025 8963 fndecl = get_callee_fndecl (t);
3d78e008
ML
8964 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8965 return END_BUILTINS;
4977bab6 8966
feda1845 8967 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
fa233e34 8968 init_const_call_expr_arg_iterator (t, &iter);
feda1845 8969 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
c0a47a61 8970 {
feda1845
RS
8971 /* If a function doesn't take a variable number of arguments,
8972 the last element in the list will have type `void'. */
8973 parmtype = TREE_VALUE (parmlist);
8974 if (VOID_TYPE_P (parmtype))
8975 {
fa233e34 8976 if (more_const_call_expr_args_p (&iter))
feda1845
RS
8977 return END_BUILTINS;
8978 return DECL_FUNCTION_CODE (fndecl);
8979 }
8980
fa233e34 8981 if (! more_const_call_expr_args_p (&iter))
c0a47a61 8982 return END_BUILTINS;
b8698a0f 8983
fa233e34 8984 arg = next_const_call_expr_arg (&iter);
5039610b 8985 argtype = TREE_TYPE (arg);
feda1845
RS
8986
8987 if (SCALAR_FLOAT_TYPE_P (parmtype))
8988 {
8989 if (! SCALAR_FLOAT_TYPE_P (argtype))
8990 return END_BUILTINS;
8991 }
8992 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8993 {
8994 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8995 return END_BUILTINS;
8996 }
8997 else if (POINTER_TYPE_P (parmtype))
8998 {
8999 if (! POINTER_TYPE_P (argtype))
9000 return END_BUILTINS;
9001 }
9002 else if (INTEGRAL_TYPE_P (parmtype))
9003 {
9004 if (! INTEGRAL_TYPE_P (argtype))
9005 return END_BUILTINS;
9006 }
9007 else
c0a47a61 9008 return END_BUILTINS;
c0a47a61
RS
9009 }
9010
feda1845 9011 /* Variable-length argument list. */
4977bab6
ZW
9012 return DECL_FUNCTION_CODE (fndecl);
9013}
9014
5039610b
SL
9015/* Fold a call to __builtin_constant_p, if we know its argument ARG will
9016 evaluate to a constant. */
b0b3afb2
BS
9017
9018static tree
5039610b 9019fold_builtin_constant_p (tree arg)
b0b3afb2 9020{
b0b3afb2
BS
9021 /* We return 1 for a numeric type that's known to be a constant
9022 value at compile-time or for an aggregate type that's a
9023 literal constant. */
5039610b 9024 STRIP_NOPS (arg);
b0b3afb2
BS
9025
9026 /* If we know this is a constant, emit the constant of one. */
5039610b
SL
9027 if (CONSTANT_CLASS_P (arg)
9028 || (TREE_CODE (arg) == CONSTRUCTOR
9029 && TREE_CONSTANT (arg)))
b0b3afb2 9030 return integer_one_node;
5039610b 9031 if (TREE_CODE (arg) == ADDR_EXPR)
fb664a2c 9032 {
5039610b 9033 tree op = TREE_OPERAND (arg, 0);
fb664a2c
RG
9034 if (TREE_CODE (op) == STRING_CST
9035 || (TREE_CODE (op) == ARRAY_REF
9036 && integer_zerop (TREE_OPERAND (op, 1))
9037 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9038 return integer_one_node;
9039 }
b0b3afb2 9040
0dcd3840
RH
9041 /* If this expression has side effects, show we don't know it to be a
9042 constant. Likewise if it's a pointer or aggregate type since in
9043 those case we only want literals, since those are only optimized
13104975
ZW
9044 when generating RTL, not later.
9045 And finally, if we are compiling an initializer, not code, we
9046 need to return a definite result now; there's not going to be any
9047 more optimization done. */
5039610b
SL
9048 if (TREE_SIDE_EFFECTS (arg)
9049 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9050 || POINTER_TYPE_P (TREE_TYPE (arg))
63b48197 9051 || cfun == 0
4e7d7b3d
JJ
9052 || folding_initializer
9053 || force_folding_builtin_constant_p)
b0b3afb2
BS
9054 return integer_zero_node;
9055
5039610b 9056 return NULL_TREE;
b0b3afb2
BS
9057}
9058
1e9168b2
ML
9059/* Create builtin_expect or builtin_expect_with_probability
9060 with PRED and EXPECTED as its arguments and return it as a truthvalue.
9061 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
9062 builtin_expect_with_probability instead uses third argument as PROBABILITY
9063 value. */
6de9cd9a
DN
9064
9065static tree
ed9c79e1 9066build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
1e9168b2 9067 tree predictor, tree probability)
6de9cd9a 9068{
419ce103 9069 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6de9cd9a 9070
1e9168b2
ML
9071 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9072 : BUILT_IN_EXPECT_WITH_PROBABILITY);
419ce103
AN
9073 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9074 ret_type = TREE_TYPE (TREE_TYPE (fn));
9075 pred_type = TREE_VALUE (arg_types);
9076 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9077
db3927fb
AH
9078 pred = fold_convert_loc (loc, pred_type, pred);
9079 expected = fold_convert_loc (loc, expected_type, expected);
1e9168b2
ML
9080
9081 if (probability)
9082 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9083 else
9084 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9085 predictor);
419ce103
AN
9086
9087 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9088 build_int_cst (ret_type, 0));
9089}
9090
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  ARG0 is the value, ARG1 the
   expected value; ARG2/ARG3 are forwarded to
   build_builtin_expect_predicate as the predictor and probability.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  /* For (a && b) or (a || b), push the expectation down into each
     operand as a fresh builtin_expect predicate.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; wrap it so it is evaluated once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the underlying decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address is not a compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
9163
5039610b 9164/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 9165
ad82abb8 9166static tree
5039610b 9167fold_builtin_classify_type (tree arg)
ad82abb8 9168{
5039610b 9169 if (arg == 0)
45a2c477 9170 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 9171
45a2c477 9172 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
9173}
9174
5039610b 9175/* Fold a call to __builtin_strlen with argument ARG. */
667bbbbb
EC
9176
9177static tree
ab996409 9178fold_builtin_strlen (location_t loc, tree type, tree arg)
667bbbbb 9179{
5039610b 9180 if (!validate_arg (arg, POINTER_TYPE))
667bbbbb
EC
9181 return NULL_TREE;
9182 else
9183 {
e09aa5bd
MS
9184 c_strlen_data lendata = { };
9185 tree len = c_strlen (arg, 0, &lendata);
667bbbbb
EC
9186
9187 if (len)
ab996409 9188 return fold_convert_loc (loc, type, len);
667bbbbb 9189
e09aa5bd
MS
9190 if (!lendata.decl)
9191 c_strlen (arg, 1, &lendata);
6ab24ea8 9192
e09aa5bd 9193 if (lendata.decl)
6ab24ea8
MS
9194 {
9195 if (EXPR_HAS_LOCATION (arg))
9196 loc = EXPR_LOCATION (arg);
9197 else if (loc == UNKNOWN_LOCATION)
9198 loc = input_location;
e09aa5bd 9199 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
6ab24ea8
MS
9200 }
9201
667bbbbb
EC
9202 return NULL_TREE;
9203 }
9204}
9205
ab5e2615
RH
9206/* Fold a call to __builtin_inf or __builtin_huge_val. */
9207
9208static tree
db3927fb 9209fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 9210{
efdc7e19
RH
9211 REAL_VALUE_TYPE real;
9212
6d84156b
JM
9213 /* __builtin_inff is intended to be usable to define INFINITY on all
9214 targets. If an infinity is not available, INFINITY expands "to a
9215 positive constant of type float that overflows at translation
9216 time", footnote "In this case, using INFINITY will violate the
9217 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9218 Thus we pedwarn to ensure this constraint violation is
9219 diagnosed. */
ab5e2615 9220 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 9221 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 9222
efdc7e19
RH
9223 real_inf (&real);
9224 return build_real (type, real);
ab5e2615
RH
9225}
9226
75c7c595
RG
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the angle; ARG1 and ARG2 are the output pointers.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* No constant folding happened; emit a real cexpi call, but only
	 if the target's libc provides C99 complex math and the
	 built-in is available implicitly.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* CALL is used twice below; save it so it is evaluated once.  */
      call = builtin_save_expr (call);
    }

  /* Store IMAGPART (the sin component of cexpi) through ARG1 and
     REALPART (the cos component) through ARG2, sequenced with a
     COMPOUND_EXPR.  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
9276
5039610b
SL
9277/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9278 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
9279
9280static tree
db3927fb 9281fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 9282{
5039610b
SL
9283 if (!validate_arg (arg1, POINTER_TYPE)
9284 || !validate_arg (arg2, POINTER_TYPE)
9285 || !validate_arg (len, INTEGER_TYPE))
9286 return NULL_TREE;
5bb650ec
RS
9287
9288 /* If the LEN parameter is zero, return zero. */
9289 if (integer_zerop (len))
db3927fb 9290 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 9291 arg1, arg2);
5bb650ec
RS
9292
9293 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9294 if (operand_equal_p (arg1, arg2, 0))
db3927fb 9295 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
01847e9d 9296
01847e9d
RS
9297 /* If len parameter is one, return an expression corresponding to
9298 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
ae7e9ddd 9299 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
9300 {
9301 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
9302 tree cst_uchar_ptr_node
9303 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9304
db3927fb
AH
9305 tree ind1
9306 = fold_convert_loc (loc, integer_type_node,
9307 build1 (INDIRECT_REF, cst_uchar_node,
9308 fold_convert_loc (loc,
9309 cst_uchar_ptr_node,
01847e9d 9310 arg1)));
db3927fb
AH
9311 tree ind2
9312 = fold_convert_loc (loc, integer_type_node,
9313 build1 (INDIRECT_REF, cst_uchar_node,
9314 fold_convert_loc (loc,
9315 cst_uchar_ptr_node,
01847e9d 9316 arg2)));
db3927fb 9317 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
01847e9d 9318 }
5bb650ec 9319
5039610b 9320 return NULL_TREE;
5bb650ec
RS
9321}
9322
5039610b 9323/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
9324
9325static tree
db3927fb 9326fold_builtin_isascii (location_t loc, tree arg)
df0785d6 9327{
5039610b
SL
9328 if (!validate_arg (arg, INTEGER_TYPE))
9329 return NULL_TREE;
df0785d6
KG
9330 else
9331 {
9332 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 9333 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9334 build_int_cst (integer_type_node,
6728ee79 9335 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 9336 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 9337 arg, integer_zero_node);
df0785d6
KG
9338 }
9339}
9340
5039610b 9341/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
9342
9343static tree
db3927fb 9344fold_builtin_toascii (location_t loc, tree arg)
df0785d6 9345{
5039610b
SL
9346 if (!validate_arg (arg, INTEGER_TYPE))
9347 return NULL_TREE;
b8698a0f 9348
5039610b 9349 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 9350 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 9351 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
9352}
9353
5039610b 9354/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
9355
9356static tree
db3927fb 9357fold_builtin_isdigit (location_t loc, tree arg)
61218d19 9358{
5039610b
SL
9359 if (!validate_arg (arg, INTEGER_TYPE))
9360 return NULL_TREE;
61218d19
KG
9361 else
9362 {
9363 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
9364 /* According to the C standard, isdigit is unaffected by locale.
9365 However, it definitely is affected by the target character set. */
c5ff069d
ZW
9366 unsigned HOST_WIDE_INT target_digit0
9367 = lang_hooks.to_target_charset ('0');
9368
9369 if (target_digit0 == 0)
9370 return NULL_TREE;
9371
db3927fb 9372 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
9373 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9374 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 9375 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 9376 build_int_cst (unsigned_type_node, 9));
61218d19
KG
9377 }
9378}
ef79730c 9379
5039610b 9380/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
9381
9382static tree
db3927fb 9383fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 9384{
5039610b
SL
9385 if (!validate_arg (arg, REAL_TYPE))
9386 return NULL_TREE;
9655d83b 9387
db3927fb 9388 arg = fold_convert_loc (loc, type, arg);
db3927fb 9389 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9390}
9391
5039610b 9392/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
9393
9394static tree
db3927fb 9395fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 9396{
5039610b
SL
9397 if (!validate_arg (arg, INTEGER_TYPE))
9398 return NULL_TREE;
9655d83b 9399
db3927fb 9400 arg = fold_convert_loc (loc, type, arg);
db3927fb 9401 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
9402}
9403
527cab20
KG
9404/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9405
9406static tree
db3927fb 9407fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 9408{
c128599a
KG
9409 if (validate_arg (arg, COMPLEX_TYPE)
9410 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
9411 {
9412 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 9413
527cab20
KG
9414 if (atan2_fn)
9415 {
5039610b 9416 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
9417 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9418 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9419 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
9420 }
9421 }
b8698a0f 9422
527cab20
KG
9423 return NULL_TREE;
9424}
9425
7a2a25ab
KG
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value, ARG1 the exponent output pointer, RETTYPE the type of
   the result.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a literal real constant that did not overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      /* Dispatch on the class of the real constant.  */
      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      /* The store through ARG1 is a side effect; mark it so it is not
	 dropped.  */
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
9481
/* Fold a call to builtin modf.  ARG0 is the real argument, ARG1 the
   pointer receiving the integral part, RETTYPE the real result type.
   Only folds when ARG0 is a compile-time REAL_CST without overflow.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only a constant real argument (with no overflow) can be folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
9537
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has a direct instruction for this classification,
     prefer that over the generic expansion below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) is a composite of two doubles.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* Save ARG: it is used more than once below.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* rmax = largest finite value, rmin = smallest normal value
	   (0x1p(emin-1)) of the original mode.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
9696
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification to fold; FNDECL supplies the call's result type.
   Returns NULL_TREE when no compile-time simplification applies.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities, isinf(x) is statically 0 (but ARG must
	 still be evaluated for side effects).  */
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
				      isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
			       integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			       isinf_call, tmp,
			       integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities every value is finite.  */
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs, isnan(x) is statically 0.  */
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
9775
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All the tests below are against fabs(arg); save it once.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Built innermost-out: start with the zero/subnormal decision.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normal value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
9847
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  /* Pick the common comparison type: the wider of two real types,
     or the real type when mixed with an integer type.  */
  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* isunordered(): without NaNs the result is statically 0.  */
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The builtin is the negation of the chosen comparison, hence the
     TRUTH_NOT_EXPR wrapper (e.g. isgreater(x,y) == !(x unle y)).  */
  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
9896
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Map the built-in to its tree code and internal function; the *_P
     variants only compute the overflow flag (note the fallthroughs).  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: compute result and flag directly.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Otherwise emit the internal function, which returns a complex
	 value: real part = result, imaginary part = overflow flag.
	 save_expr so the call is evaluated only once.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Store the arithmetic result through ARG2 and yield the flag.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
10006
b25aad5f
MS
10007/* Fold a call to __builtin_FILE to a constant string. */
10008
10009static inline tree
10010fold_builtin_FILE (location_t loc)
10011{
10012 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
10013 {
10014 /* The documentation says this builtin is equivalent to the preprocessor
10015 __FILE__ macro so it appears appropriate to use the same file prefix
10016 mappings. */
10017 fname = remap_macro_filename (fname);
b25aad5f 10018 return build_string_literal (strlen (fname) + 1, fname);
7365279f 10019 }
b25aad5f
MS
10020
10021 return build_string_literal (1, "");
10022}
10023
10024/* Fold a call to __builtin_FUNCTION to a constant string. */
10025
10026static inline tree
10027fold_builtin_FUNCTION ()
10028{
f76b4224
NS
10029 const char *name = "";
10030
b25aad5f 10031 if (current_function_decl)
f76b4224 10032 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 10033
f76b4224 10034 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
10035}
10036
10037/* Fold a call to __builtin_LINE to an integer constant. */
10038
10039static inline tree
10040fold_builtin_LINE (location_t loc, tree type)
10041{
10042 return build_int_cst (type, LOCATION_LINE (loc));
10043}
10044
5039610b 10045/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 10046 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 10047
6de9cd9a 10048static tree
2625bb5d 10049fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 10050{
c0a47a61 10051 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 10052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 10053 switch (fcode)
b0b3afb2 10054 {
b25aad5f
MS
10055 case BUILT_IN_FILE:
10056 return fold_builtin_FILE (loc);
10057
10058 case BUILT_IN_FUNCTION:
10059 return fold_builtin_FUNCTION ();
10060
10061 case BUILT_IN_LINE:
10062 return fold_builtin_LINE (loc, type);
10063
5039610b 10064 CASE_FLT_FN (BUILT_IN_INF):
6dc198e3 10065 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
5039610b
SL
10066 case BUILT_IN_INFD32:
10067 case BUILT_IN_INFD64:
10068 case BUILT_IN_INFD128:
db3927fb 10069 return fold_builtin_inf (loc, type, true);
d3147f64 10070
5039610b 10071 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
6dc198e3 10072 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
db3927fb 10073 return fold_builtin_inf (loc, type, false);
d3147f64 10074
5039610b
SL
10075 case BUILT_IN_CLASSIFY_TYPE:
10076 return fold_builtin_classify_type (NULL_TREE);
d3147f64 10077
5039610b
SL
10078 default:
10079 break;
10080 }
10081 return NULL_TREE;
10082}
d3147f64 10083
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* The classification builtins first try a mode-based fold, then
       fall back to the generic interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free(NULL) is a no-op; drop the call entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
b53fed56 10210
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The unordered comparison builtins are folded through the
       negation of the opposite comparison; see
       fold_builtin_unordered_cmp.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
10293
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All the overflow builtins share one folder, keyed on FCODE.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
b0b3afb2 10367
b5338fb3
MS
10368/* Folds a call EXPR (which may be null) to built-in function FNDECL.
10369 ARGS is an array of NARGS arguments. IGNORE is true if the result
10370 of the function call is ignored. This function returns NULL_TREE
10371 if no simplification was possible. */
b8698a0f 10372
b5338fb3
MS
10373static tree
10374fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10375 int nargs, bool)
5039610b
SL
10376{
10377 tree ret = NULL_TREE;
f4577fcd 10378
5039610b
SL
10379 switch (nargs)
10380 {
10381 case 0:
2625bb5d 10382 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
10383 break;
10384 case 1:
2625bb5d 10385 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
10386 break;
10387 case 2:
b5338fb3 10388 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
5039610b
SL
10389 break;
10390 case 3:
2625bb5d 10391 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 10392 break;
5039610b 10393 default:
903c723b 10394 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
10395 break;
10396 }
10397 if (ret)
10398 {
726a989a 10399 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 10400 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
10401 return ret;
10402 }
10403 return NULL_TREE;
10404}
10405
862d0b35
DN
10406/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10407 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10408 of arguments in ARGS to be omitted. OLDNARGS is the number of
10409 elements in ARGS. */
5039610b
SL
10410
10411static tree
862d0b35
DN
10412rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10413 int skip, tree fndecl, int n, va_list newargs)
5039610b 10414{
862d0b35
DN
10415 int nargs = oldnargs - skip + n;
10416 tree *buffer;
5039610b 10417
862d0b35 10418 if (n > 0)
5039610b 10419 {
862d0b35 10420 int i, j;
5039610b 10421
862d0b35
DN
10422 buffer = XALLOCAVEC (tree, nargs);
10423 for (i = 0; i < n; i++)
10424 buffer[i] = va_arg (newargs, tree);
10425 for (j = skip; j < oldnargs; j++, i++)
10426 buffer[i] = args[j];
10427 }
10428 else
10429 buffer = args + skip;
3bf5906b 10430
862d0b35
DN
10431 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10432}
5039610b 10433
0889e9bc
JJ
10434/* Return true if FNDECL shouldn't be folded right now.
10435 If a built-in function has an inline attribute always_inline
10436 wrapper, defer folding it after always_inline functions have
10437 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10438 might not be performed. */
10439
e7f9dae0 10440bool
0889e9bc
JJ
10441avoid_folding_inline_builtin (tree fndecl)
10442{
10443 return (DECL_DECLARED_INLINE_P (fndecl)
10444 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10445 && cfun
10446 && !cfun->always_inline_functions_inlined
10447 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10448}
10449
6de9cd9a 10450/* A wrapper function for builtin folding that prevents warnings for
caf93cb0 10451 "statement without effect" and the like, caused by removing the
6de9cd9a
DN
10452 call node earlier than the warning is generated. */
10453
10454tree
db3927fb 10455fold_call_expr (location_t loc, tree exp, bool ignore)
6de9cd9a 10456{
5039610b
SL
10457 tree ret = NULL_TREE;
10458 tree fndecl = get_callee_fndecl (exp);
3d78e008 10459 if (fndecl && fndecl_built_in_p (fndecl)
6ef5231b
JJ
10460 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10461 yet. Defer folding until we see all the arguments
10462 (after inlining). */
10463 && !CALL_EXPR_VA_ARG_PACK (exp))
10464 {
10465 int nargs = call_expr_nargs (exp);
10466
10467 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10468 instead last argument is __builtin_va_arg_pack (). Defer folding
10469 even in that case, until arguments are finalized. */
10470 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10471 {
10472 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
3d78e008 10473 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
6ef5231b
JJ
10474 return NULL_TREE;
10475 }
10476
0889e9bc
JJ
10477 if (avoid_folding_inline_builtin (fndecl))
10478 return NULL_TREE;
10479
5039610b 10480 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
f311c3b4
NF
10481 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10482 CALL_EXPR_ARGP (exp), ignore);
5039610b
SL
10483 else
10484 {
a6a0570f 10485 tree *args = CALL_EXPR_ARGP (exp);
b5338fb3 10486 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
5039610b 10487 if (ret)
db3927fb 10488 return ret;
5039610b 10489 }
6de9cd9a 10490 }
5039610b
SL
10491 return NULL_TREE;
10492}
b8698a0f 10493
a6a0570f
RB
10494/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10495 N arguments are passed in the array ARGARRAY. Return a folded
10496 expression or NULL_TREE if no simplification was possible. */
4977bab6
ZW
10497
10498tree
a6a0570f 10499fold_builtin_call_array (location_t loc, tree,
94a0dd7b
SL
10500 tree fn,
10501 int n,
10502 tree *argarray)
6385a28f 10503{
a6a0570f
RB
10504 if (TREE_CODE (fn) != ADDR_EXPR)
10505 return NULL_TREE;
5039610b 10506
a6a0570f
RB
10507 tree fndecl = TREE_OPERAND (fn, 0);
10508 if (TREE_CODE (fndecl) == FUNCTION_DECL
3d78e008 10509 && fndecl_built_in_p (fndecl))
a6a0570f
RB
10510 {
10511 /* If last argument is __builtin_va_arg_pack (), arguments to this
10512 function are not finalized yet. Defer folding until they are. */
10513 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10514 {
10515 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
3d78e008 10516 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
a6a0570f
RB
10517 return NULL_TREE;
10518 }
10519 if (avoid_folding_inline_builtin (fndecl))
10520 return NULL_TREE;
10521 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10522 return targetm.fold_builtin (fndecl, n, argarray, false);
10523 else
b5338fb3 10524 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
a6a0570f 10525 }
5039610b 10526
a6a0570f 10527 return NULL_TREE;
5039610b
SL
10528}
10529
43ea30dc
NF
10530/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10531 along with N new arguments specified as the "..." parameters. SKIP
10532 is the number of arguments in EXP to be omitted. This function is used
10533 to do varargs-to-varargs transformations. */
10534
10535static tree
10536rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10537{
10538 va_list ap;
10539 tree t;
10540
10541 va_start (ap, n);
10542 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10543 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10544 va_end (ap);
5039610b 10545
43ea30dc 10546 return t;
5039610b
SL
10547}
10548
10549/* Validate a single argument ARG against a tree code CODE representing
0dba7960 10550 a type. Return true when argument is valid. */
b8698a0f 10551
5039610b 10552static bool
0dba7960 10553validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
10554{
10555 if (!arg)
10556 return false;
10557 else if (code == POINTER_TYPE)
0dba7960 10558 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
10559 else if (code == INTEGER_TYPE)
10560 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 10561 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 10562}
019fa094 10563
726a989a
RB
10564/* This function validates the types of a function call argument list
10565 against a specified list of tree_codes. If the last specifier is a 0,
10566 that represents an ellipses, otherwise the last specifier must be a
10567 VOID_TYPE.
10568
10569 This is the GIMPLE version of validate_arglist. Eventually we want to
10570 completely convert builtins.c to work from GIMPLEs and the tree based
10571 validate_arglist will then be removed. */
10572
10573bool
538dd0b7 10574validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
10575{
10576 enum tree_code code;
10577 bool res = 0;
10578 va_list ap;
10579 const_tree arg;
10580 size_t i;
10581
10582 va_start (ap, call);
10583 i = 0;
10584
10585 do
10586 {
72b5577d 10587 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
10588 switch (code)
10589 {
10590 case 0:
10591 /* This signifies an ellipses, any further arguments are all ok. */
10592 res = true;
10593 goto end;
10594 case VOID_TYPE:
10595 /* This signifies an endlink, if no arguments remain, return
10596 true, otherwise return false. */
10597 res = (i == gimple_call_num_args (call));
10598 goto end;
10599 default:
10600 /* If no parameters remain or the parameter's code does not
10601 match the specified code, return false. Otherwise continue
10602 checking any remaining arguments. */
10603 arg = gimple_call_arg (call, i++);
10604 if (!validate_arg (arg, code))
10605 goto end;
10606 break;
10607 }
10608 }
10609 while (1);
10610
10611 /* We need gotos here since we can only have one VA_CLOSE in a
10612 function. */
10613 end: ;
10614 va_end (ap);
10615
10616 return res;
10617}
10618
f6155fda
SS
10619/* Default target-specific builtin expander that does nothing. */
10620
10621rtx
4682ae04
AJ
10622default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10623 rtx target ATTRIBUTE_UNUSED,
10624 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 10625 machine_mode mode ATTRIBUTE_UNUSED,
4682ae04 10626 int ignore ATTRIBUTE_UNUSED)
f6155fda
SS
10627{
10628 return NULL_RTX;
10629}
34ee7f82 10630
7dc61d6c
KG
10631/* Returns true is EXP represents data that would potentially reside
10632 in a readonly section. */
10633
fef5a0d9 10634bool
7dc61d6c
KG
10635readonly_data_expr (tree exp)
10636{
10637 STRIP_NOPS (exp);
10638
aef0afc4
UW
10639 if (TREE_CODE (exp) != ADDR_EXPR)
10640 return false;
10641
10642 exp = get_base_address (TREE_OPERAND (exp, 0));
10643 if (!exp)
10644 return false;
10645
10646 /* Make sure we call decl_readonly_section only for trees it
10647 can handle (since it returns true for everything it doesn't
10648 understand). */
caf93cb0 10649 if (TREE_CODE (exp) == STRING_CST
aef0afc4 10650 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 10651 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 10652 return decl_readonly_section (exp, 0);
7dc61d6c
KG
10653 else
10654 return false;
10655}
6de9cd9a 10656
5039610b
SL
10657/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10658 to the call, and TYPE is its return type.
6de9cd9a 10659
5039610b 10660 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10661 simplified form of the call as a tree.
10662
10663 The simplified form may be a constant or other expression which
10664 computes the same value, but in a more efficient manner (including
10665 calls to other builtin functions).
10666
10667 The call may contain arguments which need to be evaluated, but
10668 which are not useful to determine the result of the call. In
10669 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10670 COMPOUND_EXPR will be an argument which must be evaluated.
10671 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10672 COMPOUND_EXPR in the chain will contain the tree for the simplified
10673 form of the builtin function call. */
10674
10675static tree
b5338fb3 10676fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
6de9cd9a 10677{
5039610b
SL
10678 if (!validate_arg (s1, POINTER_TYPE)
10679 || !validate_arg (s2, POINTER_TYPE))
10680 return NULL_TREE;
6de9cd9a 10681
b5338fb3
MS
10682 if (!check_nul_terminated_array (expr, s1)
10683 || !check_nul_terminated_array (expr, s2))
10684 return NULL_TREE;
6de9cd9a 10685
b5338fb3
MS
10686 tree fn;
10687 const char *p1, *p2;
6de9cd9a 10688
b5338fb3
MS
10689 p2 = c_getstr (s2);
10690 if (p2 == NULL)
10691 return NULL_TREE;
6de9cd9a 10692
b5338fb3
MS
10693 p1 = c_getstr (s1);
10694 if (p1 != NULL)
10695 {
10696 const char *r = strpbrk (p1, p2);
10697 tree tem;
6de9cd9a 10698
b5338fb3
MS
10699 if (r == NULL)
10700 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a 10701
b5338fb3
MS
10702 /* Return an offset into the constant string argument. */
10703 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10704 return fold_convert_loc (loc, type, tem);
10705 }
6de9cd9a 10706
b5338fb3
MS
10707 if (p2[0] == '\0')
10708 /* strpbrk(x, "") == NULL.
10709 Evaluate and ignore s1 in case it had side-effects. */
10710 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
6de9cd9a 10711
b5338fb3
MS
10712 if (p2[1] != '\0')
10713 return NULL_TREE; /* Really call strpbrk. */
10714
10715 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10716 if (!fn)
10717 return NULL_TREE;
10718
10719 /* New argument list transforming strpbrk(s1, s2) to
10720 strchr(s1, s2[0]). */
10721 return build_call_expr_loc (loc, fn, 2, s1,
10722 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
10723}
10724
5039610b
SL
10725/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10726 to the call.
6de9cd9a 10727
5039610b 10728 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10729 simplified form of the call as a tree.
10730
10731 The simplified form may be a constant or other expression which
10732 computes the same value, but in a more efficient manner (including
10733 calls to other builtin functions).
10734
10735 The call may contain arguments which need to be evaluated, but
10736 which are not useful to determine the result of the call. In
10737 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10738 COMPOUND_EXPR will be an argument which must be evaluated.
10739 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10740 COMPOUND_EXPR in the chain will contain the tree for the simplified
10741 form of the builtin function call. */
10742
10743static tree
b5338fb3 10744fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 10745{
5039610b
SL
10746 if (!validate_arg (s1, POINTER_TYPE)
10747 || !validate_arg (s2, POINTER_TYPE))
10748 return NULL_TREE;
6de9cd9a 10749
b5338fb3
MS
10750 if (!check_nul_terminated_array (expr, s1)
10751 || !check_nul_terminated_array (expr, s2))
10752 return NULL_TREE;
10753
10754 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10755
10756 /* If either argument is "", return NULL_TREE. */
10757 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10758 /* Evaluate and ignore both arguments in case either one has
10759 side-effects. */
10760 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 10761 s1, s2);
b5338fb3 10762 return NULL_TREE;
6de9cd9a
DN
10763}
10764
5039610b
SL
10765/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10766 to the call.
6de9cd9a 10767
5039610b 10768 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10769 simplified form of the call as a tree.
10770
10771 The simplified form may be a constant or other expression which
10772 computes the same value, but in a more efficient manner (including
10773 calls to other builtin functions).
10774
10775 The call may contain arguments which need to be evaluated, but
10776 which are not useful to determine the result of the call. In
10777 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10778 COMPOUND_EXPR will be an argument which must be evaluated.
10779 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10780 COMPOUND_EXPR in the chain will contain the tree for the simplified
10781 form of the builtin function call. */
10782
10783static tree
b5338fb3 10784fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 10785{
5039610b
SL
10786 if (!validate_arg (s1, POINTER_TYPE)
10787 || !validate_arg (s2, POINTER_TYPE))
10788 return NULL_TREE;
b5338fb3
MS
10789
10790 if (!check_nul_terminated_array (expr, s1)
10791 || !check_nul_terminated_array (expr, s2))
10792 return NULL_TREE;
10793
10794 /* If the first argument is "", return NULL_TREE. */
10795 const char *p1 = c_getstr (s1);
10796 if (p1 && *p1 == '\0')
6de9cd9a 10797 {
b5338fb3
MS
10798 /* Evaluate and ignore argument s2 in case it has
10799 side-effects. */
10800 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 10801 size_zero_node, s2);
b5338fb3 10802 }
6de9cd9a 10803
b5338fb3
MS
10804 /* If the second argument is "", return __builtin_strlen(s1). */
10805 const char *p2 = c_getstr (s2);
10806 if (p2 && *p2 == '\0')
10807 {
10808 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a 10809
b5338fb3
MS
10810 /* If the replacement _DECL isn't initialized, don't do the
10811 transformation. */
10812 if (!fn)
10813 return NULL_TREE;
6de9cd9a 10814
b5338fb3 10815 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 10816 }
b5338fb3 10817 return NULL_TREE;
6de9cd9a
DN
10818}
10819
5039610b 10820/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
10821 produced. False otherwise. This is done so that we don't output the error
10822 or warning twice or three times. */
726a989a 10823
2efcfa4e 10824bool
5039610b 10825fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
10826{
10827 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
10828 int nargs = call_expr_nargs (exp);
10829 tree arg;
34c88790
DS
10830 /* There is good chance the current input_location points inside the
10831 definition of the va_start macro (perhaps on the token for
10832 builtin) in a system header, so warnings will not be emitted.
10833 Use the location in real source code. */
620e594b 10834 location_t current_location =
34c88790
DS
10835 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10836 NULL);
6de9cd9a 10837
f38958e8 10838 if (!stdarg_p (fntype))
2efcfa4e 10839 {
a9c697b8 10840 error ("%<va_start%> used in function with fixed arguments");
2efcfa4e
AP
10841 return true;
10842 }
5039610b
SL
10843
10844 if (va_start_p)
8870e212 10845 {
5039610b
SL
10846 if (va_start_p && (nargs != 2))
10847 {
10848 error ("wrong number of arguments to function %<va_start%>");
10849 return true;
10850 }
10851 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
10852 }
10853 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10854 when we checked the arguments and if needed issued a warning. */
5039610b 10855 else
6de9cd9a 10856 {
5039610b
SL
10857 if (nargs == 0)
10858 {
10859 /* Evidently an out of date version of <stdarg.h>; can't validate
10860 va_start's second argument, but can still work as intended. */
34c88790 10861 warning_at (current_location,
b9c8da34
DS
10862 OPT_Wvarargs,
10863 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
10864 return true;
10865 }
10866 else if (nargs > 1)
c22cacf3 10867 {
5039610b 10868 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
10869 return true;
10870 }
5039610b
SL
10871 arg = CALL_EXPR_ARG (exp, 0);
10872 }
10873
4e3825db
MM
10874 if (TREE_CODE (arg) == SSA_NAME)
10875 arg = SSA_NAME_VAR (arg);
10876
5039610b 10877 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 10878 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
10879 the arguments and if needed issuing a warning. */
10880 if (!integer_zerop (arg))
10881 {
10882 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 10883
6de9cd9a
DN
10884 /* Strip off all nops for the sake of the comparison. This
10885 is not quite the same as STRIP_NOPS. It does more.
10886 We must also strip off INDIRECT_EXPR for C++ reference
10887 parameters. */
1043771b 10888 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
10889 || TREE_CODE (arg) == INDIRECT_REF)
10890 arg = TREE_OPERAND (arg, 0);
10891 if (arg != last_parm)
c22cacf3 10892 {
118f3b19
KH
10893 /* FIXME: Sometimes with the tree optimizers we can get the
10894 not the last argument even though the user used the last
10895 argument. We just warn and set the arg to be the last
10896 argument so that we will get wrong-code because of
10897 it. */
34c88790 10898 warning_at (current_location,
b9c8da34 10899 OPT_Wvarargs,
34c88790 10900 "second parameter of %<va_start%> not last named argument");
2efcfa4e 10901 }
2985f531
MLI
10902
10903 /* Undefined by C99 7.15.1.4p4 (va_start):
10904 "If the parameter parmN is declared with the register storage
10905 class, with a function or array type, or with a type that is
10906 not compatible with the type that results after application of
10907 the default argument promotions, the behavior is undefined."
10908 */
10909 else if (DECL_REGISTER (arg))
34c88790
DS
10910 {
10911 warning_at (current_location,
b9c8da34 10912 OPT_Wvarargs,
9c582551 10913 "undefined behavior when second parameter of "
34c88790
DS
10914 "%<va_start%> is declared with %<register%> storage");
10915 }
2985f531 10916
8870e212 10917 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
10918 optimizers are run and then avoid keeping it in the tree,
10919 as otherwise we could warn even for correct code like:
10920 void foo (int i, ...)
10921 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
10922 if (va_start_p)
10923 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10924 else
10925 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
10926 }
10927 return false;
6de9cd9a
DN
10928}
10929
10930
5039610b 10931/* Expand a call EXP to __builtin_object_size. */
10a0d495 10932
9b2b7279 10933static rtx
10a0d495
JJ
10934expand_builtin_object_size (tree exp)
10935{
10936 tree ost;
10937 int object_size_type;
10938 tree fndecl = get_callee_fndecl (exp);
10a0d495 10939
5039610b 10940 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10a0d495 10941 {
0f2c4a8f 10942 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
c94ed7a1 10943 exp, fndecl);
10a0d495
JJ
10944 expand_builtin_trap ();
10945 return const0_rtx;
10946 }
10947
5039610b 10948 ost = CALL_EXPR_ARG (exp, 1);
10a0d495
JJ
10949 STRIP_NOPS (ost);
10950
10951 if (TREE_CODE (ost) != INTEGER_CST
10952 || tree_int_cst_sgn (ost) < 0
10953 || compare_tree_int (ost, 3) > 0)
10954 {
0f2c4a8f 10955 error ("%Klast argument of %qD is not integer constant between 0 and 3",
c94ed7a1 10956 exp, fndecl);
10a0d495
JJ
10957 expand_builtin_trap ();
10958 return const0_rtx;
10959 }
10960
9439e9a1 10961 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
10962
10963 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10964}
10965
10966/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10967 FCODE is the BUILT_IN_* to use.
5039610b 10968 Return NULL_RTX if we failed; the caller should emit a normal call,
10a0d495
JJ
10969 otherwise try to get the result in TARGET, if convenient (and in
10970 mode MODE if that's convenient). */
10971
10972static rtx
ef4bddc2 10973expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10a0d495
JJ
10974 enum built_in_function fcode)
10975{
5039610b 10976 if (!validate_arglist (exp,
10a0d495
JJ
10977 POINTER_TYPE,
10978 fcode == BUILT_IN_MEMSET_CHK
10979 ? INTEGER_TYPE : POINTER_TYPE,
10980 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5039610b 10981 return NULL_RTX;
10a0d495 10982
cc8bea0a
MS
10983 tree dest = CALL_EXPR_ARG (exp, 0);
10984 tree src = CALL_EXPR_ARG (exp, 1);
10985 tree len = CALL_EXPR_ARG (exp, 2);
10986 tree size = CALL_EXPR_ARG (exp, 3);
10a0d495 10987
cc8bea0a
MS
10988 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10989 /*str=*/NULL_TREE, size);
ee92e7ba
MS
10990
10991 if (!tree_fits_uhwi_p (size))
5039610b 10992 return NULL_RTX;
10a0d495 10993
cc269bb6 10994 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10a0d495 10995 {
ee92e7ba
MS
10996 /* Avoid transforming the checking call to an ordinary one when
10997 an overflow has been detected or when the call couldn't be
10998 validated because the size is not constant. */
10999 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
11000 return NULL_RTX;
10a0d495 11001
ee92e7ba 11002 tree fn = NULL_TREE;
10a0d495
JJ
11003 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11004 mem{cpy,pcpy,move,set} is available. */
11005 switch (fcode)
11006 {
11007 case BUILT_IN_MEMCPY_CHK:
e79983f4 11008 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10a0d495
JJ
11009 break;
11010 case BUILT_IN_MEMPCPY_CHK:
e79983f4 11011 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10a0d495
JJ
11012 break;
11013 case BUILT_IN_MEMMOVE_CHK:
e79983f4 11014 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10a0d495
JJ
11015 break;
11016 case BUILT_IN_MEMSET_CHK:
e79983f4 11017 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10a0d495
JJ
11018 break;
11019 default:
11020 break;
11021 }
11022
11023 if (! fn)
5039610b 11024 return NULL_RTX;
10a0d495 11025
aa493694 11026 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
44e10129
MM
11027 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11028 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
11029 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11030 }
11031 else if (fcode == BUILT_IN_MEMSET_CHK)
5039610b 11032 return NULL_RTX;
10a0d495
JJ
11033 else
11034 {
0eb77834 11035 unsigned int dest_align = get_pointer_alignment (dest);
10a0d495
JJ
11036
11037 /* If DEST is not a pointer type, call the normal function. */
11038 if (dest_align == 0)
5039610b 11039 return NULL_RTX;
10a0d495
JJ
11040
11041 /* If SRC and DEST are the same (and not volatile), do nothing. */
11042 if (operand_equal_p (src, dest, 0))
11043 {
11044 tree expr;
11045
11046 if (fcode != BUILT_IN_MEMPCPY_CHK)
11047 {
11048 /* Evaluate and ignore LEN in case it has side-effects. */
11049 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11050 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11051 }
11052
5d49b6a7 11053 expr = fold_build_pointer_plus (dest, len);
10a0d495
JJ
11054 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11055 }
11056
11057 /* __memmove_chk special case. */
11058 if (fcode == BUILT_IN_MEMMOVE_CHK)
11059 {
0eb77834 11060 unsigned int src_align = get_pointer_alignment (src);
10a0d495
JJ
11061
11062 if (src_align == 0)
5039610b 11063 return NULL_RTX;
10a0d495
JJ
11064
11065 /* If src is categorized for a readonly section we can use
11066 normal __memcpy_chk. */
11067 if (readonly_data_expr (src))
11068 {
e79983f4 11069 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10a0d495 11070 if (!fn)
5039610b 11071 return NULL_RTX;
aa493694
JJ
11072 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11073 dest, src, len, size);
44e10129
MM
11074 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11075 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
11076 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11077 }
11078 }
5039610b 11079 return NULL_RTX;
10a0d495
JJ
11080 }
11081}
11082
11083/* Emit warning if a buffer overflow is detected at compile time. */
11084
11085static void
11086maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11087{
ee92e7ba
MS
11088 /* The source string. */
11089 tree srcstr = NULL_TREE;
11090 /* The size of the destination object. */
11091 tree objsize = NULL_TREE;
11092 /* The string that is being concatenated with (as in __strcat_chk)
11093 or null if it isn't. */
11094 tree catstr = NULL_TREE;
11095 /* The maximum length of the source sequence in a bounded operation
11096 (such as __strncat_chk) or null if the operation isn't bounded
11097 (such as __strcat_chk). */
cc8bea0a 11098 tree maxread = NULL_TREE;
9c1caf50
MS
11099 /* The exact size of the access (such as in __strncpy_chk). */
11100 tree size = NULL_TREE;
10a0d495
JJ
11101
11102 switch (fcode)
11103 {
11104 case BUILT_IN_STRCPY_CHK:
11105 case BUILT_IN_STPCPY_CHK:
ee92e7ba
MS
11106 srcstr = CALL_EXPR_ARG (exp, 1);
11107 objsize = CALL_EXPR_ARG (exp, 2);
11108 break;
11109
10a0d495 11110 case BUILT_IN_STRCAT_CHK:
ee92e7ba
MS
11111 /* For __strcat_chk the warning will be emitted only if overflowing
11112 by at least strlen (dest) + 1 bytes. */
11113 catstr = CALL_EXPR_ARG (exp, 0);
11114 srcstr = CALL_EXPR_ARG (exp, 1);
11115 objsize = CALL_EXPR_ARG (exp, 2);
10a0d495 11116 break;
ee92e7ba 11117
1c2fc017 11118 case BUILT_IN_STRNCAT_CHK:
ee92e7ba
MS
11119 catstr = CALL_EXPR_ARG (exp, 0);
11120 srcstr = CALL_EXPR_ARG (exp, 1);
cc8bea0a 11121 maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
11122 objsize = CALL_EXPR_ARG (exp, 3);
11123 break;
11124
10a0d495 11125 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 11126 case BUILT_IN_STPNCPY_CHK:
ee92e7ba 11127 srcstr = CALL_EXPR_ARG (exp, 1);
9c1caf50 11128 size = CALL_EXPR_ARG (exp, 2);
ee92e7ba 11129 objsize = CALL_EXPR_ARG (exp, 3);
10a0d495 11130 break;
ee92e7ba 11131
10a0d495
JJ
11132 case BUILT_IN_SNPRINTF_CHK:
11133 case BUILT_IN_VSNPRINTF_CHK:
cc8bea0a 11134 maxread = CALL_EXPR_ARG (exp, 1);
ee92e7ba 11135 objsize = CALL_EXPR_ARG (exp, 3);
10a0d495
JJ
11136 break;
11137 default:
11138 gcc_unreachable ();
11139 }
11140
cc8bea0a 11141 if (catstr && maxread)
10a0d495 11142 {
ee92e7ba
MS
11143 /* Check __strncat_chk. There is no way to determine the length
11144 of the string to which the source string is being appended so
11145 just warn when the length of the source string is not known. */
d9c5a8b9
MS
11146 check_strncat_sizes (exp, objsize);
11147 return;
10a0d495 11148 }
10a0d495 11149
cc8bea0a
MS
11150 /* The destination argument is the first one for all built-ins above. */
11151 tree dst = CALL_EXPR_ARG (exp, 0);
11152
11153 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10a0d495
JJ
11154}
11155
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the CALL_EXPR for
   the call and FCODE distinguishes the two built-ins (only
   BUILT_IN_SPRINTF_CHK can have its "%s" argument analyzed, since
   for the va_list variant the ... arguments are not visible).  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  The _chk
     variants take at least (dst, flag, objsize, fmt).  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* Give up when the object size is not a compile-time constant or is
     the all-ones "unknown size" sentinel.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  /* Format characters must be compared in the target charset.  */
  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  /* LEN is now the number of bytes the call will write; warn if it
     exceeds the object size SIZE.  */
  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
11213
f9555f40
JJ
11214/* Emit warning if a free is called with address of a variable. */
11215
11216static void
11217maybe_emit_free_warning (tree exp)
11218{
9616781d
JJ
11219 if (call_expr_nargs (exp) != 1)
11220 return;
11221
f9555f40
JJ
11222 tree arg = CALL_EXPR_ARG (exp, 0);
11223
11224 STRIP_NOPS (arg);
11225 if (TREE_CODE (arg) != ADDR_EXPR)
11226 return;
11227
11228 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 11229 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
11230 return;
11231
11232 if (SSA_VAR_P (arg))
a3a704a4
MH
11233 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11234 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 11235 else
a3a704a4
MH
11236 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11237 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
11238}
11239
5039610b
SL
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST is the object-size type (0-3) selecting maximum
   vs. minimum estimates and whole-object vs. subobject sizes.
   Returns the folded size as a size_t constant, or NULL_TREE when
   the size cannot be determined (yet).  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a literal constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
11286
903c723b
TC
11287/* Builtins with folding operations that operate on "..." arguments
11288 need special handling; we need to store the arguments in a convenient
11289 data structure before attempting any folding. Fortunately there are
11290 only a few builtins that fall into this category. FNDECL is the
11291 function, EXP is the CALL_EXPR for the call. */
11292
11293static tree
11294fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11295{
11296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11297 tree ret = NULL_TREE;
11298
11299 switch (fcode)
11300 {
11301 case BUILT_IN_FPCLASSIFY:
11302 ret = fold_builtin_fpclassify (loc, args, nargs);
11303 break;
11304
11305 default:
11306 break;
11307 }
11308 if (ret)
11309 {
11310 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11311 SET_EXPR_LOCATION (ret, loc);
11312 TREE_NO_WARNING (ret) = 1;
11313 return ret;
11314 }
11315 return NULL_TREE;
11316}
11317
000ba23d
KG
11318/* Initialize format string characters in the target charset. */
11319
fef5a0d9 11320bool
000ba23d
KG
11321init_target_chars (void)
11322{
11323 static bool init;
11324 if (!init)
11325 {
11326 target_newline = lang_hooks.to_target_charset ('\n');
11327 target_percent = lang_hooks.to_target_charset ('%');
11328 target_c = lang_hooks.to_target_charset ('c');
11329 target_s = lang_hooks.to_target_charset ('s');
11330 if (target_newline == 0 || target_percent == 0 || target_c == 0
11331 || target_s == 0)
11332 return false;
11333
11334 target_percent_c[0] = target_percent;
11335 target_percent_c[1] = target_c;
11336 target_percent_c[2] = '\0';
11337
11338 target_percent_s[0] = target_percent;
11339 target_percent_s[1] = target_s;
11340 target_percent_s[2] = '\0';
11341
11342 target_percent_s_newline[0] = target_percent;
11343 target_percent_s_newline[1] = target_s;
11344 target_percent_s_newline[2] = target_newline;
11345 target_percent_s_newline[3] = '\0';
c22cacf3 11346
000ba23d
KG
11347 init = true;
11348 }
11349 return true;
11350}
1f3f1f68 11351
4413d881
KG
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Round-trip through the target mode to detect values the
	     mode cannot represent exactly.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
11388
c128599a
KG
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  Both the real and imaginary
     parts must pass the checks.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  /* TYPE is a complex type; its element type determines the
	     scalar mode each part must fit in.  */
	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
c128599a 11435
ea91f957
KG
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns a COMPOUND_EXPR performing the
   *quo store followed by the remainder value, or NULL_TREE when the
   call cannot be folded.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, MPFR_RNDN);
	  mpfr_from_real (m1, ra1, MPFR_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
752b7d38
KG
11509
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 11574
a41d064d
KG
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.
     Both arguments must be complex constants with REAL_TYPE parts.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear the flags first so do_mpc_ckconv can detect
	     overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 11634
726a989a
RB
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the
   GIMPLE call; IGNORE is true when the call's value is unused.
   Returns the folded replacement expression or NULL_TREE.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls carrying __builtin_va_arg_pack () cannot be folded because
     their argument list is not yet final.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR wrapper fold_builtin_n
		     may have added.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
d7f09764 11684
e79983f4 11685/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
11686 and set ASMSPEC as its user assembler name. DECL must be a
11687 function decl that declares a builtin. */
11688
11689void
11690set_builtin_user_assembler_name (tree decl, const char *asmspec)
11691{
3d78e008 11692 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
d7f09764
DN
11693 && asmspec != 0);
11694
ee516de9 11695 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 11696 set_user_assembler_name (builtin, asmspec);
ee516de9
EB
11697
11698 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11699 && INT_TYPE_SIZE < BITS_PER_WORD)
d7f09764 11700 {
fffbab82 11701 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
ee516de9 11702 set_user_assembler_libfunc ("ffs", asmspec);
fffbab82 11703 set_optab_libfunc (ffs_optab, mode, "ffs");
d7f09764
DN
11704 }
11705}
bec922f0
SL
11706
11707/* Return true if DECL is a builtin that expands to a constant or similarly
11708 simple code. */
11709bool
11710is_simple_builtin (tree decl)
11711{
3d78e008 11712 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
bec922f0
SL
11713 switch (DECL_FUNCTION_CODE (decl))
11714 {
11715 /* Builtins that expand to constants. */
11716 case BUILT_IN_CONSTANT_P:
11717 case BUILT_IN_EXPECT:
11718 case BUILT_IN_OBJECT_SIZE:
11719 case BUILT_IN_UNREACHABLE:
11720 /* Simple register moves or loads from stack. */
45d439ac 11721 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
11722 case BUILT_IN_RETURN_ADDRESS:
11723 case BUILT_IN_EXTRACT_RETURN_ADDR:
11724 case BUILT_IN_FROB_RETURN_ADDR:
11725 case BUILT_IN_RETURN:
11726 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11727 case BUILT_IN_FRAME_ADDRESS:
11728 case BUILT_IN_VA_END:
11729 case BUILT_IN_STACK_SAVE:
11730 case BUILT_IN_STACK_RESTORE:
11731 /* Exception state returns or moves registers around. */
11732 case BUILT_IN_EH_FILTER:
11733 case BUILT_IN_EH_POINTER:
11734 case BUILT_IN_EH_COPY_VALUES:
11735 return true;
11736
11737 default:
11738 return false;
11739 }
11740
11741 return false;
11742}
11743
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  /* Machine-specific built-ins are assumed inexpensive.  */
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* Simple arithmetic, bit operations and classification built-ins
	 that expand to a handful of instructions or a cheap libcall.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	/* Fall back to the stricter classification.  */
	return is_simple_builtin (decl);
      }

  return false;
}
488c6247
ML
11829
11830/* Return true if T is a constant and the value cast to a target char
11831 can be represented by a host char.
11832 Store the casted char constant in *P if so. */
11833
11834bool
11835target_char_cst_p (tree t, char *p)
11836{
11837 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11838 return false;
11839
11840 *p = (char)tree_to_uhwi (t);
11841 return true;
11842}
5747e0c0
XHL
11843
11844/* Return true if the builtin DECL is implemented in a standard library.
11845 Otherwise returns false which doesn't guarantee it is not (thus the list of
11846 handled builtins below may be incomplete). */
11847
11848bool
11849builtin_with_linkage_p (tree decl)
11850{
11851 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11852 switch (DECL_FUNCTION_CODE (decl))
11853 {
11854 CASE_FLT_FN (BUILT_IN_ACOS):
11855 CASE_FLT_FN (BUILT_IN_ACOSH):
11856 CASE_FLT_FN (BUILT_IN_ASIN):
11857 CASE_FLT_FN (BUILT_IN_ASINH):
11858 CASE_FLT_FN (BUILT_IN_ATAN):
11859 CASE_FLT_FN (BUILT_IN_ATANH):
11860 CASE_FLT_FN (BUILT_IN_ATAN2):
11861 CASE_FLT_FN (BUILT_IN_CBRT):
11862 CASE_FLT_FN (BUILT_IN_CEIL):
11863 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11864 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11865 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11866 CASE_FLT_FN (BUILT_IN_COS):
11867 CASE_FLT_FN (BUILT_IN_COSH):
11868 CASE_FLT_FN (BUILT_IN_ERF):
11869 CASE_FLT_FN (BUILT_IN_ERFC):
11870 CASE_FLT_FN (BUILT_IN_EXP):
11871 CASE_FLT_FN (BUILT_IN_EXP2):
11872 CASE_FLT_FN (BUILT_IN_EXPM1):
11873 CASE_FLT_FN (BUILT_IN_FABS):
11874 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11875 CASE_FLT_FN (BUILT_IN_FDIM):
11876 CASE_FLT_FN (BUILT_IN_FLOOR):
11877 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11878 CASE_FLT_FN (BUILT_IN_FMA):
11879 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11880 CASE_FLT_FN (BUILT_IN_FMAX):
11881 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11882 CASE_FLT_FN (BUILT_IN_FMIN):
11883 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11884 CASE_FLT_FN (BUILT_IN_FMOD):
11885 CASE_FLT_FN (BUILT_IN_FREXP):
11886 CASE_FLT_FN (BUILT_IN_HYPOT):
11887 CASE_FLT_FN (BUILT_IN_ILOGB):
11888 CASE_FLT_FN (BUILT_IN_LDEXP):
11889 CASE_FLT_FN (BUILT_IN_LGAMMA):
11890 CASE_FLT_FN (BUILT_IN_LLRINT):
11891 CASE_FLT_FN (BUILT_IN_LLROUND):
11892 CASE_FLT_FN (BUILT_IN_LOG):
11893 CASE_FLT_FN (BUILT_IN_LOG10):
11894 CASE_FLT_FN (BUILT_IN_LOG1P):
11895 CASE_FLT_FN (BUILT_IN_LOG2):
11896 CASE_FLT_FN (BUILT_IN_LOGB):
11897 CASE_FLT_FN (BUILT_IN_LRINT):
11898 CASE_FLT_FN (BUILT_IN_LROUND):
11899 CASE_FLT_FN (BUILT_IN_MODF):
11900 CASE_FLT_FN (BUILT_IN_NAN):
11901 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11902 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11903 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11904 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11905 CASE_FLT_FN (BUILT_IN_POW):
11906 CASE_FLT_FN (BUILT_IN_REMAINDER):
11907 CASE_FLT_FN (BUILT_IN_REMQUO):
11908 CASE_FLT_FN (BUILT_IN_RINT):
11909 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11910 CASE_FLT_FN (BUILT_IN_ROUND):
11911 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11912 CASE_FLT_FN (BUILT_IN_SCALBLN):
11913 CASE_FLT_FN (BUILT_IN_SCALBN):
11914 CASE_FLT_FN (BUILT_IN_SIN):
11915 CASE_FLT_FN (BUILT_IN_SINH):
11916 CASE_FLT_FN (BUILT_IN_SINCOS):
11917 CASE_FLT_FN (BUILT_IN_SQRT):
11918 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11919 CASE_FLT_FN (BUILT_IN_TAN):
11920 CASE_FLT_FN (BUILT_IN_TANH):
11921 CASE_FLT_FN (BUILT_IN_TGAMMA):
11922 CASE_FLT_FN (BUILT_IN_TRUNC):
11923 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11924 return true;
11925 default:
11926 break;
11927 }
11928 return false;
11929}