/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
81f5094d 76
fa19795e
RS
77struct target_builtins default_target_builtins;
78#if SWITCHABLE_TARGET
79struct target_builtins *this_target_builtins = &default_target_builtins;
80#endif
81
9df2c88c 82/* Define the names of the builtin function types and codes. */
5e351e96 83const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85
c6a912da 86#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 87const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
88{
89#include "builtins.def"
90};
9df2c88c 91
cbf5d0e7 92/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 93 initialized to NULL_TREE. */
cbf5d0e7 94builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 95
4e7d7b3d
JJ
96/* Non-zero if __builtin_constant_p should be folded right away. */
97bool force_folding_builtin_constant_p;
98
095a2d76 99static rtx c_readstr (const char *, scalar_int_mode);
4682ae04 100static int target_char_cast (tree, char *);
435bb2a1 101static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
102static int apply_args_size (void);
103static int apply_result_size (void);
4682ae04 104static rtx result_vector (int, rtx);
4682ae04
AJ
105static void expand_builtin_prefetch (tree);
106static rtx expand_builtin_apply_args (void);
107static rtx expand_builtin_apply_args_1 (void);
108static rtx expand_builtin_apply (rtx, rtx, rtx);
109static void expand_builtin_return (rtx);
110static enum type_class type_to_class (tree);
111static rtx expand_builtin_classify_type (tree);
6c7cf1f0 112static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 113static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 114static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 115static rtx expand_builtin_sincos (tree);
4359dc2a 116static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
117static rtx expand_builtin_int_roundingfn (tree, rtx);
118static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 119static rtx expand_builtin_next_arg (void);
4682ae04
AJ
120static rtx expand_builtin_va_start (tree);
121static rtx expand_builtin_va_end (tree);
122static rtx expand_builtin_va_copy (tree);
44e10129 123static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 126static rtx expand_builtin_memchr (tree, rtx);
44e10129 127static rtx expand_builtin_memcpy (tree, rtx);
edcf72f3 128static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
671a00ee
ML
129static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
130 rtx target, tree exp, int endp);
e50d56a5 131static rtx expand_builtin_memmove (tree, rtx);
671a00ee
ML
132static rtx expand_builtin_mempcpy (tree, rtx);
133static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
134static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
ee92e7ba 135static rtx expand_builtin_strcat (tree, rtx);
44e10129
MM
136static rtx expand_builtin_strcpy (tree, rtx);
137static rtx expand_builtin_strcpy_args (tree, tree, rtx);
ef4bddc2 138static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 139static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 140static rtx expand_builtin_strncat (tree, rtx);
44e10129 141static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 142static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2 143static rtx expand_builtin_memset (tree, rtx, machine_mode);
edcf72f3 144static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
ef4bddc2 145static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 146static rtx expand_builtin_bzero (tree);
ef4bddc2 147static rtx expand_builtin_strlen (tree, rtx, machine_mode);
b7e52782 148static rtx expand_builtin_alloca (tree);
ef4bddc2 149static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 150static rtx expand_builtin_frame_address (tree, tree);
db3927fb 151static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04
AJ
152static rtx expand_builtin_expect (tree, rtx);
153static tree fold_builtin_constant_p (tree);
154static tree fold_builtin_classify_type (tree);
ab996409 155static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 156static tree fold_builtin_inf (location_t, tree, int);
db3927fb 157static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 158static bool validate_arg (const_tree, enum tree_code code);
4682ae04 159static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 160static rtx expand_builtin_signbit (tree, rtx);
db3927fb 161static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
162static tree fold_builtin_isascii (location_t, tree);
163static tree fold_builtin_toascii (location_t, tree);
164static tree fold_builtin_isdigit (location_t, tree);
165static tree fold_builtin_fabs (location_t, tree, tree);
166static tree fold_builtin_abs (location_t, tree, tree);
167static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 168 enum tree_code);
2625bb5d
RB
169static tree fold_builtin_0 (location_t, tree);
170static tree fold_builtin_1 (location_t, tree, tree);
171static tree fold_builtin_2 (location_t, tree, tree, tree);
172static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
903c723b 173static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb
AH
174
175static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
db3927fb
AH
176static tree fold_builtin_strspn (location_t, tree, tree);
177static tree fold_builtin_strcspn (location_t, tree, tree);
6de9cd9a 178
10a0d495 179static rtx expand_builtin_object_size (tree);
ef4bddc2 180static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
181 enum built_in_function);
182static void maybe_emit_chk_warning (tree, enum built_in_function);
183static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 184static void maybe_emit_free_warning (tree);
5039610b 185static tree fold_builtin_object_size (tree, tree);
000ba23d 186
ad03a744 187unsigned HOST_WIDE_INT target_newline;
fef5a0d9 188unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
189static unsigned HOST_WIDE_INT target_c;
190static unsigned HOST_WIDE_INT target_s;
edd7ae68 191char target_percent_c[3];
fef5a0d9 192char target_percent_s[3];
ad03a744 193char target_percent_s_newline[4];
ea91f957 194static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 195static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 196static void expand_builtin_sync_synchronize (void);
10a0d495 197
/* Return true if NAME starts with one of the reserved builtin
   prefixes: __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
6de9cd9a 211
d7f09764
DN
212
213/* Return true if DECL is a function symbol representing a built-in. */
214
215bool
216is_builtin_fn (tree decl)
217{
218 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
219}
220
bbf7ce11
RAE
221/* Return true if NODE should be considered for inline expansion regardless
222 of the optimization level. This means whenever a function is invoked with
223 its "internal" name, which normally contains the prefix "__builtin". */
224
4cfe7a6c 225bool
bbf7ce11
RAE
226called_as_built_in (tree node)
227{
228 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
229 we want the name used to call the function, not the name it
230 will have. */
231 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
232 return is_builtin_name (name);
233}
234
644ffefd
MJ
235/* Compute values M and N such that M divides (address of EXP - N) and such
236 that N < M. If these numbers can be determined, store M in alignp and N in
237 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
238 *alignp and any bit-offset to *bitposp.
73f6eabc
RS
239
240 Note that the address (and thus the alignment) computed here is based
241 on the address to which a symbol resolves, whereas DECL_ALIGN is based
242 on the address at which an object is actually located. These two
243 addresses are not always the same. For example, on ARM targets,
244 the address &foo of a Thumb function foo() has the lowest bit set,
b0f4a35f 245 whereas foo() itself starts on an even address.
df96b059 246
b0f4a35f
RG
247 If ADDR_P is true we are taking the address of the memory reference EXP
248 and thus cannot rely on the access taking place. */
249
250static bool
251get_object_alignment_2 (tree exp, unsigned int *alignp,
252 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
df96b059 253{
7df9b6f1 254 poly_int64 bitsize, bitpos;
e80c2726 255 tree offset;
ef4bddc2 256 machine_mode mode;
ee45a32d 257 int unsignedp, reversep, volatilep;
eae76e53 258 unsigned int align = BITS_PER_UNIT;
644ffefd 259 bool known_alignment = false;
df96b059 260
e80c2726
RG
261 /* Get the innermost object and the constant (bitpos) and possibly
262 variable (offset) offset of the access. */
ee45a32d 263 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
25b75a48 264 &unsignedp, &reversep, &volatilep);
e80c2726
RG
265
266 /* Extract alignment information from the innermost object and
267 possibly adjust bitpos and offset. */
b0f4a35f 268 if (TREE_CODE (exp) == FUNCTION_DECL)
73f6eabc 269 {
b0f4a35f
RG
270 /* Function addresses can encode extra information besides their
271 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
272 allows the low bit to be used as a virtual bit, we know
273 that the address itself must be at least 2-byte aligned. */
274 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
275 align = 2 * BITS_PER_UNIT;
73f6eabc 276 }
b0f4a35f
RG
277 else if (TREE_CODE (exp) == LABEL_DECL)
278 ;
279 else if (TREE_CODE (exp) == CONST_DECL)
e80c2726 280 {
b0f4a35f
RG
281 /* The alignment of a CONST_DECL is determined by its initializer. */
282 exp = DECL_INITIAL (exp);
e80c2726 283 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 284 if (CONSTANT_CLASS_P (exp))
58e17cf8 285 align = targetm.constant_alignment (exp, align);
6b00e42d 286
b0f4a35f 287 known_alignment = true;
e80c2726 288 }
b0f4a35f 289 else if (DECL_P (exp))
644ffefd 290 {
b0f4a35f 291 align = DECL_ALIGN (exp);
644ffefd 292 known_alignment = true;
644ffefd 293 }
b0f4a35f
RG
294 else if (TREE_CODE (exp) == INDIRECT_REF
295 || TREE_CODE (exp) == MEM_REF
296 || TREE_CODE (exp) == TARGET_MEM_REF)
e80c2726
RG
297 {
298 tree addr = TREE_OPERAND (exp, 0);
644ffefd
MJ
299 unsigned ptr_align;
300 unsigned HOST_WIDE_INT ptr_bitpos;
4ceae7e9 301 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
644ffefd 302
4ceae7e9 303 /* If the address is explicitely aligned, handle that. */
e80c2726
RG
304 if (TREE_CODE (addr) == BIT_AND_EXPR
305 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
306 {
4ceae7e9
RB
307 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
308 ptr_bitmask *= BITS_PER_UNIT;
146ec50f 309 align = least_bit_hwi (ptr_bitmask);
e80c2726
RG
310 addr = TREE_OPERAND (addr, 0);
311 }
644ffefd 312
b0f4a35f
RG
313 known_alignment
314 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
b0f4a35f
RG
315 align = MAX (ptr_align, align);
316
4ceae7e9
RB
317 /* Re-apply explicit alignment to the bitpos. */
318 ptr_bitpos &= ptr_bitmask;
319
3c82efd9
RG
320 /* The alignment of the pointer operand in a TARGET_MEM_REF
321 has to take the variable offset parts into account. */
b0f4a35f 322 if (TREE_CODE (exp) == TARGET_MEM_REF)
1be38ccb 323 {
b0f4a35f
RG
324 if (TMR_INDEX (exp))
325 {
326 unsigned HOST_WIDE_INT step = 1;
327 if (TMR_STEP (exp))
328 step = TREE_INT_CST_LOW (TMR_STEP (exp));
146ec50f 329 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
b0f4a35f
RG
330 }
331 if (TMR_INDEX2 (exp))
332 align = BITS_PER_UNIT;
333 known_alignment = false;
1be38ccb 334 }
644ffefd 335
b0f4a35f
RG
336 /* When EXP is an actual memory reference then we can use
337 TYPE_ALIGN of a pointer indirection to derive alignment.
338 Do so only if get_pointer_alignment_1 did not reveal absolute
3c82efd9
RG
339 alignment knowledge and if using that alignment would
340 improve the situation. */
a4cf4b64 341 unsigned int talign;
3c82efd9 342 if (!addr_p && !known_alignment
a4cf4b64
RB
343 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
344 && talign > align)
345 align = talign;
3c82efd9
RG
346 else
347 {
348 /* Else adjust bitpos accordingly. */
349 bitpos += ptr_bitpos;
350 if (TREE_CODE (exp) == MEM_REF
351 || TREE_CODE (exp) == TARGET_MEM_REF)
aca52e6f 352 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
3c82efd9 353 }
e80c2726 354 }
b0f4a35f 355 else if (TREE_CODE (exp) == STRING_CST)
1be38ccb 356 {
b0f4a35f
RG
357 /* STRING_CST are the only constant objects we allow to be not
358 wrapped inside a CONST_DECL. */
359 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 360 if (CONSTANT_CLASS_P (exp))
58e17cf8 361 align = targetm.constant_alignment (exp, align);
6b00e42d 362
b0f4a35f 363 known_alignment = true;
e80c2726 364 }
e80c2726
RG
365
366 /* If there is a non-constant offset part extract the maximum
367 alignment that can prevail. */
eae76e53 368 if (offset)
e80c2726 369 {
e75fde1a 370 unsigned int trailing_zeros = tree_ctz (offset);
eae76e53 371 if (trailing_zeros < HOST_BITS_PER_INT)
e80c2726 372 {
eae76e53
JJ
373 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
374 if (inner)
375 align = MIN (align, inner);
e80c2726 376 }
e80c2726
RG
377 }
378
7df9b6f1
RS
379 /* Account for the alignment of runtime coefficients, so that the constant
380 bitpos is guaranteed to be accurate. */
381 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
382 if (alt_align != 0 && alt_align < align)
383 {
384 align = alt_align;
385 known_alignment = false;
386 }
387
b0f4a35f 388 *alignp = align;
7df9b6f1 389 *bitposp = bitpos.coeffs[0] & (align - 1);
644ffefd 390 return known_alignment;
daade206
RG
391}
392
b0f4a35f
RG
393/* For a memory reference expression EXP compute values M and N such that M
394 divides (&EXP - N) and such that N < M. If these numbers can be determined,
395 store M in alignp and N in *BITPOSP and return true. Otherwise return false
396 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
397
398bool
399get_object_alignment_1 (tree exp, unsigned int *alignp,
400 unsigned HOST_WIDE_INT *bitposp)
401{
402 return get_object_alignment_2 (exp, alignp, bitposp, false);
403}
404
0eb77834 405/* Return the alignment in bits of EXP, an object. */
daade206
RG
406
407unsigned int
0eb77834 408get_object_alignment (tree exp)
daade206
RG
409{
410 unsigned HOST_WIDE_INT bitpos = 0;
411 unsigned int align;
412
644ffefd 413 get_object_alignment_1 (exp, &align, &bitpos);
daade206 414
e80c2726
RG
415 /* align and bitpos now specify known low bits of the pointer.
416 ptr & (align - 1) == bitpos. */
417
418 if (bitpos != 0)
146ec50f 419 align = least_bit_hwi (bitpos);
0eb77834 420 return align;
df96b059
JJ
421}
422
644ffefd
MJ
423/* For a pointer valued expression EXP compute values M and N such that M
424 divides (EXP - N) and such that N < M. If these numbers can be determined,
b0f4a35f
RG
425 store M in alignp and N in *BITPOSP and return true. Return false if
426 the results are just a conservative approximation.
28f4ec01 427
644ffefd 428 If EXP is not a pointer, false is returned too. */
28f4ec01 429
644ffefd
MJ
430bool
431get_pointer_alignment_1 (tree exp, unsigned int *alignp,
432 unsigned HOST_WIDE_INT *bitposp)
28f4ec01 433{
1be38ccb 434 STRIP_NOPS (exp);
6026b73e 435
1be38ccb 436 if (TREE_CODE (exp) == ADDR_EXPR)
b0f4a35f
RG
437 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
438 alignp, bitposp, true);
5fa79de8
RB
439 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
440 {
441 unsigned int align;
442 unsigned HOST_WIDE_INT bitpos;
443 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
444 &align, &bitpos);
445 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
446 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
447 else
448 {
449 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
450 if (trailing_zeros < HOST_BITS_PER_INT)
451 {
452 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
453 if (inner)
454 align = MIN (align, inner);
455 }
456 }
457 *alignp = align;
458 *bitposp = bitpos & (align - 1);
459 return res;
460 }
1be38ccb
RG
461 else if (TREE_CODE (exp) == SSA_NAME
462 && POINTER_TYPE_P (TREE_TYPE (exp)))
28f4ec01 463 {
644ffefd 464 unsigned int ptr_align, ptr_misalign;
1be38ccb 465 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
644ffefd
MJ
466
467 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
468 {
469 *bitposp = ptr_misalign * BITS_PER_UNIT;
470 *alignp = ptr_align * BITS_PER_UNIT;
5505978a
RB
471 /* Make sure to return a sensible alignment when the multiplication
472 by BITS_PER_UNIT overflowed. */
473 if (*alignp == 0)
474 *alignp = 1u << (HOST_BITS_PER_INT - 1);
b0f4a35f 475 /* We cannot really tell whether this result is an approximation. */
5f9a167b 476 return false;
644ffefd
MJ
477 }
478 else
87c0fb4b
RG
479 {
480 *bitposp = 0;
644ffefd
MJ
481 *alignp = BITS_PER_UNIT;
482 return false;
87c0fb4b 483 }
28f4ec01 484 }
44fabee4
RG
485 else if (TREE_CODE (exp) == INTEGER_CST)
486 {
487 *alignp = BIGGEST_ALIGNMENT;
488 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
489 & (BIGGEST_ALIGNMENT - 1));
490 return true;
491 }
1be38ccb 492
87c0fb4b 493 *bitposp = 0;
644ffefd
MJ
494 *alignp = BITS_PER_UNIT;
495 return false;
28f4ec01
BS
496}
497
87c0fb4b
RG
498/* Return the alignment in bits of EXP, a pointer valued expression.
499 The alignment returned is, by default, the alignment of the thing that
500 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
501
502 Otherwise, look at the expression to see if we can do better, i.e., if the
503 expression is actually pointing at an object whose alignment is tighter. */
504
505unsigned int
506get_pointer_alignment (tree exp)
507{
508 unsigned HOST_WIDE_INT bitpos = 0;
509 unsigned int align;
644ffefd
MJ
510
511 get_pointer_alignment_1 (exp, &align, &bitpos);
87c0fb4b
RG
512
513 /* align and bitpos now specify known low bits of the pointer.
514 ptr & (align - 1) == bitpos. */
515
516 if (bitpos != 0)
146ec50f 517 align = least_bit_hwi (bitpos);
87c0fb4b
RG
518
519 return align;
520}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      /* Wide elements: an element is "zero" only if all of its bytes
	 are zero; compare against a block of NUL bytes.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
554
555/* Compute the length of a null-terminated character string or wide
556 character string handling character sizes of 1, 2, and 4 bytes.
557 TREE_STRING_LENGTH is not the right way because it evaluates to
558 the size of the character array in bytes (as opposed to characters)
559 and because it can contain a zero byte in the middle.
28f4ec01 560
f1ba665b 561 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 562 into the instruction stream and zero if it is going to be expanded.
f1ba665b 563 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627
JJ
564 is returned, otherwise NULL, since
565 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
566 evaluate the side-effects.
567
21e8fb22
RB
568 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
569 accesses. Note that this implies the result is not going to be emitted
570 into the instruction stream.
571
fed3cef0
RK
572 The value returned is of type `ssizetype'.
573
28f4ec01
BS
574 Unfortunately, string_constant can't access the values of const char
575 arrays with initializers, so neither can we do so here. */
576
6de9cd9a 577tree
ae808627 578c_strlen (tree src, int only_value)
28f4ec01 579{
ae808627
JJ
580 STRIP_NOPS (src);
581 if (TREE_CODE (src) == COND_EXPR
582 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
583 {
584 tree len1, len2;
585
586 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
587 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
33521f7d 588 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
589 return len1;
590 }
591
592 if (TREE_CODE (src) == COMPOUND_EXPR
593 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
594 return c_strlen (TREE_OPERAND (src, 1), only_value);
595
1eb4547b 596 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 597
1eb4547b
MS
598 /* Offset from the beginning of the string in bytes. */
599 tree byteoff;
600 src = string_constant (src, &byteoff);
28f4ec01 601 if (src == 0)
5039610b 602 return NULL_TREE;
fed3cef0 603
1eb4547b
MS
604 /* Determine the size of the string element. */
605 unsigned eltsize
606 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
607
608 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
609 length of SRC. */
610 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
611
612 /* PTR can point to the byte representation of any string type, including
613 char* and wchar_t*. */
614 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 615
1eb4547b 616 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01
BS
617 {
618 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
619 compute the offset to the following null if we don't know where to
620 start searching for it. */
1eb4547b
MS
621 if (string_length (ptr, eltsize, maxelts) < maxelts)
622 {
623 /* Return when an embedded null character is found. */
5039610b 624 return NULL_TREE;
1eb4547b 625 }
fed3cef0 626
c42d0aa0
MS
627 if (!maxelts)
628 return ssize_int (0);
629
28f4ec01
BS
630 /* We don't know the starting offset, but we do know that the string
631 has no internal zero bytes. We can assume that the offset falls
632 within the bounds of the string; otherwise, the programmer deserves
633 what he gets. Subtract the offset from the length of the string,
fed3cef0
RK
634 and return that. This would perhaps not be valid if we were dealing
635 with named arrays in addition to literal string constants. */
636
1eb4547b 637 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
28f4ec01
BS
638 }
639
1eb4547b
MS
640 /* Offset from the beginning of the string in elements. */
641 HOST_WIDE_INT eltoff;
642
28f4ec01 643 /* We have a known offset into the string. Start searching there for
5197bd50 644 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
645 if (byteoff == 0)
646 eltoff = 0;
647 else if (! tree_fits_shwi_p (byteoff))
648 eltoff = -1;
28f4ec01 649 else
1eb4547b 650 eltoff = tree_to_shwi (byteoff) / eltsize;
fed3cef0 651
b2ed71b6
BE
652 /* If the offset is known to be out of bounds, warn, and call strlen at
653 runtime. */
1eb4547b 654 if (eltoff < 0 || eltoff > maxelts)
28f4ec01 655 {
b2ed71b6 656 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81
RB
657 if (only_value != 2
658 && !TREE_NO_WARNING (src))
b2ed71b6 659 {
c42d0aa0
MS
660 warning_at (loc, OPT_Warray_bounds,
661 "offset %qwi outside bounds of constant string",
1eb4547b 662 eltoff);
b2ed71b6
BE
663 TREE_NO_WARNING (src) = 1;
664 }
5039610b 665 return NULL_TREE;
28f4ec01 666 }
fed3cef0 667
28f4ec01
BS
668 /* Use strlen to search for the first zero byte. Since any strings
669 constructed with build_string will have nulls appended, we win even
670 if we get handed something like (char[4])"abcd".
671
1eb4547b 672 Since ELTOFF is our starting index into the string, no further
28f4ec01 673 calculation is needed. */
1eb4547b
MS
674 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
675 maxelts - eltoff);
676
677 return ssize_int (len);
28f4ec01
BS
678}
679
807e902e 680/* Return a constant integer corresponding to target reading
bf06b5d8 681 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ab937357 682
57814e5e 683static rtx
095a2d76 684c_readstr (const char *str, scalar_int_mode mode)
57814e5e 685{
57814e5e
JJ
686 HOST_WIDE_INT ch;
687 unsigned int i, j;
807e902e 688 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
57814e5e 689
298e6adc 690 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
807e902e
KZ
691 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
692 / HOST_BITS_PER_WIDE_INT;
693
694 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
695 for (i = 0; i < len; i++)
696 tmp[i] = 0;
5906d013 697
57814e5e
JJ
698 ch = 1;
699 for (i = 0; i < GET_MODE_SIZE (mode); i++)
700 {
701 j = i;
702 if (WORDS_BIG_ENDIAN)
703 j = GET_MODE_SIZE (mode) - i - 1;
704 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
e046112d 705 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
57814e5e
JJ
706 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
707 j *= BITS_PER_UNIT;
5906d013 708
57814e5e
JJ
709 if (ch)
710 ch = (unsigned char) str[i];
807e902e 711 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
57814e5e 712 }
807e902e
KZ
713
714 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
715 return immed_wide_int_const (c, mode);
57814e5e
JJ
716}
717
ab937357 718/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 719 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
720 P. */
721
722static int
4682ae04 723target_char_cast (tree cst, char *p)
ab937357
JJ
724{
725 unsigned HOST_WIDE_INT val, hostval;
726
de77ab75 727 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
728 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
729 return 1;
730
807e902e 731 /* Do not care if it fits or not right here. */
de77ab75 732 val = TREE_INT_CST_LOW (cst);
807e902e 733
ab937357 734 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 735 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
736
737 hostval = val;
738 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 739 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
740
741 if (val != hostval)
742 return 1;
743
744 *p = hostval;
745 return 0;
746}
747
6de9cd9a
DN
748/* Similar to save_expr, but assumes that arbitrary code is not executed
749 in between the multiple evaluations. In particular, we assume that a
750 non-addressable local variable will not be modified. */
751
752static tree
753builtin_save_expr (tree exp)
754{
5cbf5c20
RG
755 if (TREE_CODE (exp) == SSA_NAME
756 || (TREE_ADDRESSABLE (exp) == 0
757 && (TREE_CODE (exp) == PARM_DECL
8813a647 758 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
759 return exp;
760
761 return save_expr (exp);
762}
763
28f4ec01
BS
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE, which is either
   BUILT_IN_RETURN_ADDRESS or BUILT_IN_FRAME_ADDRESS).  Returns an rtx
   holding the requested address.  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  /* Give the target a chance to set up register windows etc. before we
     walk outer frames.  */
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Load the chain value into a register before the next iteration
	 dereferences it.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
831
/* Alias set used for setjmp buffer.  Lazily allocated on first use;
   -1 means "not yet created".  */
static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 834
250d07b6 835/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
836 return to RECEIVER_LABEL. This is also called directly by the SJLJ
837 exception handling code. */
28f4ec01 838
250d07b6 839void
4682ae04 840expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 841{
ef4bddc2 842 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 843 rtx stack_save;
3bdf5ad1 844 rtx mem;
28f4ec01 845
3bdf5ad1
RK
846 if (setjmp_alias_set == -1)
847 setjmp_alias_set = new_alias_set ();
848
5ae6cd0d 849 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 850
7d505b82 851 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 852
250d07b6
RH
853 /* We store the frame pointer and the address of receiver_label in
854 the buffer and use the rest of it for the stack save area, which
855 is machine-dependent. */
28f4ec01 856
3bdf5ad1 857 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 858 set_mem_alias_set (mem, setjmp_alias_set);
d6da68b9 859 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
3bdf5ad1 860
0a81f074
RS
861 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
862 GET_MODE_SIZE (Pmode))),
ba4828e0 863 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
864
865 emit_move_insn (validize_mem (mem),
250d07b6 866 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
867
868 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 869 plus_constant (Pmode, buf_addr,
28f4ec01 870 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 871 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 872 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
873
874 /* If there is further processing to do, do it. */
95a3fb9d
RS
875 if (targetm.have_builtin_setjmp_setup ())
876 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 877
ecaebb9e 878 /* We have a nonlocal label. */
e3b5732b 879 cfun->has_nonlocal_label = 1;
250d07b6 880}
28f4ec01 881
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Let the target emit any receiver-specific code; fall through to
     nothing if it has neither hook.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
28f4ec01 962
28f4ec01
BS
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the buffer filled in by expand_builtin_setjmp_setup;
   VALUE must be const1_rtx (enforced below).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* Lazily create the alias set shared by all setjmp buffer accesses.  */
  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* Slots 0, 1 and 2 of the buffer: frame pointer, receiver label,
	 saved stack pointer.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* The expansion above must have emitted a jump or call before
	 reaching LAST.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1047
862d0b35
DN
1048static inline bool
1049more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1050{
1051 return (iter->i < iter->n);
1052}
1053
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   CALLEXPR is the CALL_EXPR to check; the variable arguments are the
   expected tree codes, one per actual argument.  Returns true if the
   argument list matches (and, for POINTER_TYPE positions covered by a
   nonnull attribute, the actual argument is not a literal null).  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      /* tree_code is promoted to int when passed through `...'.  */
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
1123
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success or
   NULL_RTX if the argument list does not validate.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer in word 0 and the stack
     pointer in word 1.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1204
2b92e7f5
RK
1205/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1206 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
1207 It updates the stack pointer in that block to the current value. This is
1208 also called directly by the SJLJ exception handling code. */
2b92e7f5 1209
d33606c3 1210void
2b92e7f5
RK
1211expand_builtin_update_setjmp_buf (rtx buf_addr)
1212{
ef4bddc2 1213 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 1214 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 1215 rtx stack_save
2b92e7f5
RK
1216 = gen_rtx_MEM (sa_mode,
1217 memory_address
1218 (sa_mode,
0a81f074
RS
1219 plus_constant (Pmode, buf_addr,
1220 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 1221
9eac0f2a 1222 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
1223}
1224
a9ccbb60
JJ
1225/* Expand a call to __builtin_prefetch. For a target that does not support
1226 data prefetch, evaluate the memory address argument in case it has side
1227 effects. */
1228
1229static void
5039610b 1230expand_builtin_prefetch (tree exp)
a9ccbb60
JJ
1231{
1232 tree arg0, arg1, arg2;
5039610b 1233 int nargs;
a9ccbb60
JJ
1234 rtx op0, op1, op2;
1235
5039610b 1236 if (!validate_arglist (exp, POINTER_TYPE, 0))
e83d297b
JJ
1237 return;
1238
5039610b
SL
1239 arg0 = CALL_EXPR_ARG (exp, 0);
1240
e83d297b
JJ
1241 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1242 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1243 locality). */
5039610b
SL
1244 nargs = call_expr_nargs (exp);
1245 if (nargs > 1)
1246 arg1 = CALL_EXPR_ARG (exp, 1);
e83d297b 1247 else
5039610b
SL
1248 arg1 = integer_zero_node;
1249 if (nargs > 2)
1250 arg2 = CALL_EXPR_ARG (exp, 2);
1251 else
9a9d280e 1252 arg2 = integer_three_node;
a9ccbb60
JJ
1253
1254 /* Argument 0 is an address. */
1255 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1256
1257 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1258 if (TREE_CODE (arg1) != INTEGER_CST)
1259 {
40b97a2e 1260 error ("second argument to %<__builtin_prefetch%> must be a constant");
ca7fd9cd 1261 arg1 = integer_zero_node;
a9ccbb60 1262 }
84217346 1263 op1 = expand_normal (arg1);
a9ccbb60
JJ
1264 /* Argument 1 must be either zero or one. */
1265 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1266 {
d4ee4d25 1267 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
40b97a2e 1268 " using zero");
a9ccbb60
JJ
1269 op1 = const0_rtx;
1270 }
1271
1272 /* Argument 2 (locality) must be a compile-time constant int. */
1273 if (TREE_CODE (arg2) != INTEGER_CST)
1274 {
40b97a2e 1275 error ("third argument to %<__builtin_prefetch%> must be a constant");
a9ccbb60
JJ
1276 arg2 = integer_zero_node;
1277 }
84217346 1278 op2 = expand_normal (arg2);
a9ccbb60
JJ
1279 /* Argument 2 must be 0, 1, 2, or 3. */
1280 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1281 {
d4ee4d25 1282 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
a9ccbb60
JJ
1283 op2 = const0_rtx;
1284 }
1285
134b044d 1286 if (targetm.have_prefetch ())
a9ccbb60 1287 {
a5c7d693
RS
1288 struct expand_operand ops[3];
1289
1290 create_address_operand (&ops[0], op0);
1291 create_integer_operand (&ops[1], INTVAL (op1));
1292 create_integer_operand (&ops[2], INTVAL (op2));
134b044d 1293 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
a5c7d693 1294 return;
a9ccbb60 1295 }
ad76cef8 1296
5ab2f7b7
KH
1297 /* Don't do anything with direct references to volatile memory, but
1298 generate code to handle other side effects. */
3c0cb5de 1299 if (!MEM_P (op0) && side_effects_p (op0))
5ab2f7b7 1300 emit_insn (op0);
a9ccbb60
JJ
1301}
1302
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned BLKmode MEM carries attributes derived
   from EXP and alias set 0 (may alias anything).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (possibly SAVE_EXPR-wrapped) address.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Unbounded range: size_zero_node .. NULL upper bound.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1357\f
/* Built-in functions to perform an untyped call and return.  */

/* Per-target cached mode tables for the registers handled by
   __builtin_apply_args / __builtin_apply; filled in lazily by
   apply_args_size and apply_result_size below.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
28f4ec01
BS
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The result is cached in a function-local
   static since it never changes for a given target.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      /* One slot per register that can carry a function argument, each
	 aligned to its mode's alignment.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1406
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Cached like apply_args_size.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      /* One slot per register that can hold a function return value.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1445
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per value-return
   register, moving between the register and its slot in RESULT.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Slot layout must match apply_result_size exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1474
1475/* Save the state required to perform an untyped call with the same
1476 arguments as were passed to the current function. */
1477
1478static rtx
4682ae04 1479expand_builtin_apply_args_1 (void)
28f4ec01 1480{
88e541e1 1481 rtx registers, tem;
28f4ec01 1482 int size, align, regno;
b660eccf 1483 fixed_size_mode mode;
92f6864c 1484 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
28f4ec01
BS
1485
1486 /* Create a block where the arg-pointer, structure value address,
1487 and argument registers can be saved. */
1488 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1489
1490 /* Walk past the arg-pointer and structure value address. */
1491 size = GET_MODE_SIZE (Pmode);
92f6864c 1492 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1493 size += GET_MODE_SIZE (Pmode);
1494
1495 /* Save each register used in calling a function to the block. */
1496 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1497 if ((mode = apply_args_mode[regno]) != VOIDmode)
1498 {
28f4ec01
BS
1499 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1500 if (size % align != 0)
1501 size = CEIL (size, align) * align;
1502
1503 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1504
f4ef873c 1505 emit_move_insn (adjust_address (registers, mode, size), tem);
28f4ec01
BS
1506 size += GET_MODE_SIZE (mode);
1507 }
1508
1509 /* Save the arg pointer to the block. */
2e3f842f 1510 tem = copy_to_reg (crtl->args.internal_arg_pointer);
88e541e1 1511 /* We need the pointer as the caller actually passed them to us, not
ac3f5df7
HPN
1512 as we might have pretended they were passed. Make sure it's a valid
1513 operand, as emit_move_insn isn't expected to handle a PLUS. */
581edfa3
TS
1514 if (STACK_GROWS_DOWNWARD)
1515 tem
1516 = force_operand (plus_constant (Pmode, tem,
1517 crtl->args.pretend_args_size),
1518 NULL_RTX);
88e541e1 1519 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
33521f7d 1520
28f4ec01
BS
1521 size = GET_MODE_SIZE (Pmode);
1522
1523 /* Save the structure value address unless this is passed as an
1524 "invisible" first argument. */
61f71b34 1525 if (struct_incoming_value)
28f4ec01 1526 {
f4ef873c 1527 emit_move_insn (adjust_address (registers, Pmode, size),
61f71b34 1528 copy_to_reg (struct_incoming_value));
28f4ec01
BS
1529 size += GET_MODE_SIZE (Pmode);
1530 }
1531
1532 /* Return the address of the block. */
1533 return copy_addr_to_reg (XEXP (registers, 0));
1534}
1535
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  The result is memoized in apply_args_value so at most one
   save sequence is emitted per function.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1579
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is an rtx for the address of the function to call,
   ARGUMENTS is an rtx for the address of the argument block built by
   __builtin_apply_args, and ARGSIZE is an rtx for the size in bytes
   of that block.  Returns (in ptr_mode) the address of a stack block
   in which the call's return registers have been saved.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      /* On upward-growing stacks the outgoing block starts ARGSIZE
	 bytes below the virtual outgoing-args pointer.  */
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned to each register's natural alignment while
	   walking the saved-register block.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1741
/* Perform an untyped return.  RESULT is the address (in ptr_mode) of a
   block holding saved return registers in the same layout produced by
   apply_result_size; the registers are reloaded from it and control
   jumps to the end of the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target provides an untyped_return pattern, let it do the
     whole job.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned to the register's natural alignment while
	   walking the saved block.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE of the register in CALL_FUSAGE so the
	   restores are not deleted as dead.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1790
ad82abb8 1791/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1792
ad82abb8 1793static enum type_class
4682ae04 1794type_to_class (tree type)
ad82abb8
ZW
1795{
1796 switch (TREE_CODE (type))
1797 {
1798 case VOID_TYPE: return void_type_class;
1799 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1800 case ENUMERAL_TYPE: return enumeral_type_class;
1801 case BOOLEAN_TYPE: return boolean_type_class;
1802 case POINTER_TYPE: return pointer_type_class;
1803 case REFERENCE_TYPE: return reference_type_class;
1804 case OFFSET_TYPE: return offset_type_class;
1805 case REAL_TYPE: return real_type_class;
1806 case COMPLEX_TYPE: return complex_type_class;
1807 case FUNCTION_TYPE: return function_type_class;
1808 case METHOD_TYPE: return method_type_class;
1809 case RECORD_TYPE: return record_type_class;
1810 case UNION_TYPE:
1811 case QUAL_UNION_TYPE: return union_type_class;
1812 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1813 ? string_type_class : array_type_class);
ad82abb8
ZW
1814 case LANG_TYPE: return lang_type_class;
1815 default: return no_type_class;
1816 }
1817}
8d51ecf8 1818
5039610b 1819/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1820
28f4ec01 1821static rtx
5039610b 1822expand_builtin_classify_type (tree exp)
28f4ec01 1823{
5039610b
SL
1824 if (call_expr_nargs (exp))
1825 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1826 return GEN_INT (no_type_class);
1827}
1828
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;

/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* Codes for the _Float<N>/_Float<N>X variants; only the
     CASE_MATHFN_FLOATN cases overwrite these defaults.  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the variant whose operand type matches TYPE (ignoring
     qualifiers, hence the main variant).  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
1994/* Return mathematic function equivalent to FN but operating directly on TYPE,
1995 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1996 otherwise use the explicit declaration. If we can't do the conversion,
1997 return null. */
1998
1999static tree
b03ff92e 2000mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2001{
2002 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2003 if (fcode2 == END_BUILTINS)
5039610b 2004 return NULL_TREE;
e79983f4
MM
2005
2006 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2007 return NULL_TREE;
2008
2009 return builtin_decl_explicit (fcode2);
272f51a3
JH
2010}
2011
/* Like mathfn_built_in_1, but always use the implicit array.
   Returns the builtin decl for FN specialized to TYPE, or NULL_TREE.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
2019
/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  Convenience overload that converts
   FN to a combined_fn first.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* Expand internal-fn.def into one case per directly-mapped builtin;
       each maps the whole float-variant family to a single IFN.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* A few builtins map to an internal function with a different name.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is ldexp only when the radix of the return type is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2067/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2068 on the current target by a call to an internal function, return the
2069 code of that internal function, otherwise return IFN_LAST. The caller
2070 is responsible for ensuring that any side-effects of the built-in
2071 call are dealt with correctly. E.g. if CALL sets errno, the caller
2072 must decide that the errno result isn't needed or make it available
2073 in some other way. */
2074
2075internal_fn
2076replacement_internal_fn (gcall *call)
2077{
2078 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2079 {
2080 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2081 if (ifn != IFN_LAST)
2082 {
2083 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2084 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2085 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2086 return ifn;
2087 }
2088 }
2089 return IFN_LAST;
2090}
2091
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list so that re-expanding an argument
     (e.g. for the library fallback) does not duplicate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos first try the combined sincos optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces both values; request only the one this
	     builtin needs and discard the other output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2267
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* BUILTIN_OPTAB is still unknown_optab for the no-optab cases above.  */
  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2311
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the original argument so it can be reinstated if the
	 inline expansion is abandoned below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: delete any insns emitted since LAST and undo
	 the SAVE_EXPR substitution before falling back to a call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2362
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the sine and cosine are stored through the second
   and third pointer arguments and const0_rtx is returned.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos pointer arguments so the stores
     below get the right aliasing information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2416
75c7c595
RG
2417/* Expand a call to the internal cexpi builtin to the sincos math function.
2418 EXP is the expression that is a call to the builtin function; if convenient,
4359dc2a 2419 the result should be placed in TARGET. */
75c7c595
RG
2420
/* Expand a call to the internal cexpi builtin to the sincos math
   function.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   Returns the RTX holding the complex result.  Three strategies are
   tried in order: a sincos optab, a libc sincos call, and finally a
   call to cexp itself.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the precision of the cexpi
	 builtin being expanded.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* sincos writes its results through pointer arguments, so create
	 two stack temporaries and pass their addresses.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + xi); build the purely imaginary argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cexpi (x) = cos (x) + sin (x)*i,
     so the real part comes from op2 (cos) and the imaginary from op1
     (sin).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2525
44e10129
MM
2526/* Conveniently construct a function call expression. FNDECL names the
2527 function to be called, N is the number of arguments, and the "..."
2528 parameters are the argument expressions. Unlike build_call_exr
2529 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2530
2531static tree
2532build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2533{
2534 va_list ap;
2535 tree fntype = TREE_TYPE (fndecl);
2536 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2537
2538 va_start (ap, n);
2539 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2540 va_end (ap);
2541 SET_EXPR_LOCATION (fn, loc);
2542 return fn;
2543}
44e10129 2544
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  /* Callers guarantee a single REAL_TYPE argument; anything else is
     an internal error, not a reason to punt.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab and the math function to fall back on.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2682
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  /* Callers guarantee a single REAL_TYPE argument; anything else is
     an internal error, not a reason to punt.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* The i* variants additionally record an l* fallback for targets
       lacking a direct expansion.  */
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2784
5039610b 2785/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
2786 a normal call should be emitted rather than expanding the function
2787 in-line. EXP is the expression that is a call to the builtin
2788 function; if convenient, the result should be placed in TARGET. */
2789
2790static rtx
4359dc2a 2791expand_builtin_powi (tree exp, rtx target)
17684d46 2792{
17684d46
RG
2793 tree arg0, arg1;
2794 rtx op0, op1;
ef4bddc2
RS
2795 machine_mode mode;
2796 machine_mode mode2;
17684d46 2797
5039610b
SL
2798 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2799 return NULL_RTX;
17684d46 2800
5039610b
SL
2801 arg0 = CALL_EXPR_ARG (exp, 0);
2802 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
2803 mode = TYPE_MODE (TREE_TYPE (exp));
2804
17684d46
RG
2805 /* Emit a libcall to libgcc. */
2806
5039610b 2807 /* Mode of the 2nd argument must match that of an int. */
f4b31647 2808 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 2809
17684d46
RG
2810 if (target == NULL_RTX)
2811 target = gen_reg_rtx (mode);
2812
4359dc2a 2813 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
2814 if (GET_MODE (op0) != mode)
2815 op0 = convert_to_mode (mode, op0, 0);
49452c07 2816 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
2817 if (GET_MODE (op1) != mode2)
2818 op1 = convert_to_mode (mode2, op1, 0);
17684d46 2819
8a33f100 2820 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 2821 target, LCT_CONST, mode,
0b8495ae 2822 op0, mode, op1, mode2);
17684d46
RG
2823
2824 return target;
2825}
2826
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient, in mode
   TARGET_MODE.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Walk the
     modes from TARGET_MODE upward looking for a strlen pattern.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-address computation in ahead of the strlen insn
     emitted above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
2932
57814e5e
JJ
2933/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2934 bytes from constant string DATA + OFFSET and return it as target
2935 constant. */
2936
2937static rtx
4682ae04 2938builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 2939 scalar_int_mode mode)
57814e5e
JJ
2940{
2941 const char *str = (const char *) data;
2942
298e6adc
NS
2943 gcc_assert (offset >= 0
2944 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2945 <= strlen (str) + 1));
57814e5e
JJ
2946
2947 return c_readstr (str + offset, mode);
2948}
2949
/* LEN specifies the length of the block of a memcpy/memset operation,
   with LEN_RTX its expanded RTL form.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then
   we set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      /* Compile-time constant length: all three outputs are exact.  */
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA range info if available.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine the minimal size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti range allowing negative values of N.  We
	     still can use the information and make a guess that N is
	     not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3016
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
   DSTSIZE).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  get_range_strlen (srcstr, range);
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  if (dstwrite)
    get_size_range (dstwrite, range);

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      if (range[0] == range[1])
	warning_at (loc, opt,
		    "%K%qD specified size %E "
		    "exceeds maximum object size %E",
		    exp, func, range[0], maxobjsize);
      else
	warning_at (loc, opt,
		    "%K%qD specified size between %E and %E "
		    "exceeds maximum object size %E",
		    exp, func,
		    range[0], range[1], maxobjsize);
      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  /* Suppress the diagnostic if it has already been issued for
	     this call.  */
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_n (loc, opt, tree_to_uhwi (range[0]),
		       "%K%qD writing %E byte into a region "
		       "of size %E overflows the destination",
		       "%K%qD writing %E bytes into a region "
		       "of size %E overflows the destination",
		       exp, func, range[0], dstsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, func, range[0], range[1],
			dstsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      get_size_range (maxread, range);

      /* Use the lower end for MAXREAD from now on.  */
      if (range[0])
	maxread = range[0];

      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], dstsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], range[1], dstsize);
	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_n (loc, opt, tree_to_uhwi (range[0]),
		   "%K%qD reading %E byte from a region of size %E",
		   "%K%qD reading %E bytes from a region of size %E",
		   exp, func, range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, func, range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, func, range[0], range[1], slen);
      return false;
    }

  return true;
}
3299
3300/* Helper to compute the size of the object referenced by the DEST
025d57f0 3301 expression which must have pointer type, using Object Size type
ee92e7ba 3302 OSTYPE (only the least significant 2 bits are used). Return
af3fa359
MS
3303 an estimate of the size of the object if successful or NULL when
3304 the size cannot be determined. When the referenced object involves
3305 a non-constant offset in some range the returned value represents
3306 the largest size given the smallest non-negative offset in the
3307 range. The function is intended for diagnostics and should not
3308 be used to influence code generation or optimization. */
ee92e7ba 3309
025d57f0 3310tree
d9c5a8b9 3311compute_objsize (tree dest, int ostype)
ee92e7ba
MS
3312{
3313 unsigned HOST_WIDE_INT size;
025d57f0
MS
3314
3315 /* Only the two least significant bits are meaningful. */
3316 ostype &= 3;
3317
3318 if (compute_builtin_object_size (dest, ostype, &size))
ee92e7ba
MS
3319 return build_int_cst (sizetype, size);
3320
025d57f0
MS
3321 if (TREE_CODE (dest) == SSA_NAME)
3322 {
3323 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3324 if (!is_gimple_assign (stmt))
3325 return NULL_TREE;
3326
af3fa359
MS
3327 dest = gimple_assign_rhs1 (stmt);
3328
025d57f0 3329 tree_code code = gimple_assign_rhs_code (stmt);
af3fa359
MS
3330 if (code == POINTER_PLUS_EXPR)
3331 {
3332 /* compute_builtin_object_size fails for addresses with
3333 non-constant offsets. Try to determine the range of
3334 such an offset here and use it to adjus the constant
3335 size. */
3336 tree off = gimple_assign_rhs2 (stmt);
3337 if (TREE_CODE (off) == SSA_NAME
3338 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3339 {
3340 wide_int min, max;
3341 enum value_range_type rng = get_range_info (off, &min, &max);
3342
3343 if (rng == VR_RANGE)
3344 {
3345 if (tree size = compute_objsize (dest, ostype))
3346 {
3347 wide_int wisiz = wi::to_wide (size);
3348
3349 /* Ignore negative offsets for now. For others,
3350 use the lower bound as the most optimistic
3351 estimate of the (remaining)size. */
3352 if (wi::sign_mask (min))
3353 ;
3354 else if (wi::ltu_p (min, wisiz))
3355 return wide_int_to_tree (TREE_TYPE (size),
3356 wi::sub (wisiz, min));
3357 else
3358 return size_zero_node;
3359 }
3360 }
3361 }
3362 }
3363 else if (code != ADDR_EXPR)
025d57f0 3364 return NULL_TREE;
025d57f0
MS
3365 }
3366
af3fa359
MS
3367 /* Unless computing the largest size (for memcpy and other raw memory
3368 functions), try to determine the size of the object from its type. */
3369 if (!ostype)
3370 return NULL_TREE;
3371
025d57f0
MS
3372 if (TREE_CODE (dest) != ADDR_EXPR)
3373 return NULL_TREE;
3374
3375 tree type = TREE_TYPE (dest);
3376 if (TREE_CODE (type) == POINTER_TYPE)
3377 type = TREE_TYPE (type);
3378
3379 type = TYPE_MAIN_VARIANT (type);
3380
3381 if (TREE_CODE (type) == ARRAY_TYPE
f1acdcd0 3382 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
025d57f0
MS
3383 {
3384 /* Return the constant size unless it's zero (that's a zero-length
3385 array likely at the end of a struct). */
3386 tree size = TYPE_SIZE_UNIT (type);
3387 if (size && TREE_CODE (size) == INTEGER_CST
3388 && !integer_zerop (size))
3389 return size;
3390 }
3391
ee92e7ba
MS
3392 return NULL_TREE;
3393}
3394
3395/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
3396 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3397 call expression, DEST is the destination argument, SRC is the source
3398 argument or null, and LEN is the number of bytes. Use Object Size type-0
3399 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
3400 (no overflow or invalid sizes), false otherwise. */
3401
3402static bool
cc8bea0a 3403check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 3404{
ee92e7ba 3405 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
3406 try to determine the size of the largest source and destination
3407 object using type-0 Object Size regardless of the object size
3408 type specified by the option. */
3409 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3410 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 3411
cc8bea0a
MS
3412 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3413 srcsize, dstsize);
d9c5a8b9
MS
3414}
3415
3416/* Validate memchr arguments without performing any expansion.
3417 Return NULL_RTX. */
3418
3419static rtx
3420expand_builtin_memchr (tree exp, rtx)
3421{
3422 if (!validate_arglist (exp,
3423 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3424 return NULL_RTX;
3425
3426 tree arg1 = CALL_EXPR_ARG (exp, 0);
3427 tree len = CALL_EXPR_ARG (exp, 2);
3428
3429 /* Diagnose calls where the specified length exceeds the size
3430 of the object. */
3431 if (warn_stringop_overflow)
3432 {
3433 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
3434 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3435 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
3436 }
3437
3438 return NULL_RTX;
ee92e7ba
MS
3439}
3440
5039610b
SL
3441/* Expand a call EXP to the memcpy builtin.
3442 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 3443 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 3444 mode MODE if that's convenient). */
5039610b 3445
28f4ec01 3446static rtx
44e10129 3447expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 3448{
5039610b
SL
3449 if (!validate_arglist (exp,
3450 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3451 return NULL_RTX;
ee92e7ba
MS
3452
3453 tree dest = CALL_EXPR_ARG (exp, 0);
3454 tree src = CALL_EXPR_ARG (exp, 1);
3455 tree len = CALL_EXPR_ARG (exp, 2);
3456
cc8bea0a 3457 check_memop_access (exp, dest, src, len);
ee92e7ba 3458
671a00ee
ML
3459 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3460 /*endp=*/ 0);
edcf72f3 3461}
57814e5e 3462
e50d56a5
MS
3463/* Check a call EXP to the memmove built-in for validity.
3464 Return NULL_RTX on both success and failure. */
3465
3466static rtx
3467expand_builtin_memmove (tree exp, rtx)
3468{
3469 if (!validate_arglist (exp,
3470 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3471 return NULL_RTX;
3472
3473 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 3474 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
3475 tree len = CALL_EXPR_ARG (exp, 2);
3476
cc8bea0a 3477 check_memop_access (exp, dest, src, len);
e50d56a5
MS
3478
3479 return NULL_RTX;
3480}
3481
edcf72f3
IE
3482/* Expand an instrumented call EXP to the memcpy builtin.
3483 Return NULL_RTX if we failed, the caller should emit a normal call,
3484 otherwise try to get the result in TARGET, if convenient (and in
3485 mode MODE if that's convenient). */
28f4ec01 3486
edcf72f3
IE
3487static rtx
3488expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3489{
3490 if (!validate_arglist (exp,
3491 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3492 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3493 INTEGER_TYPE, VOID_TYPE))
3494 return NULL_RTX;
3495 else
3496 {
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 2);
3499 tree len = CALL_EXPR_ARG (exp, 4);
671a00ee
ML
3500 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3501 /*end_p=*/ 0);
28f4ec01 3502
edcf72f3
IE
3503 /* Return src bounds with the result. */
3504 if (res)
aa0f70e6 3505 {
30975f63 3506 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
3507 expand_normal (CALL_EXPR_ARG (exp, 1)));
3508 res = chkp_join_splitted_slot (res, bnd);
aa0f70e6 3509 }
edcf72f3 3510 return res;
28f4ec01
BS
3511 }
3512}
3513
5039610b
SL
3514/* Expand a call EXP to the mempcpy builtin.
3515 Return NULL_RTX if we failed; the caller should emit a normal call,
e3e9f108 3516 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e
JJ
3517 mode MODE if that's convenient). If ENDP is 0 return the
3518 destination pointer, if ENDP is 1 return the end pointer ala
3519 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3520 stpcpy. */
e3e9f108
JJ
3521
3522static rtx
671a00ee 3523expand_builtin_mempcpy (tree exp, rtx target)
e3e9f108 3524{
5039610b
SL
3525 if (!validate_arglist (exp,
3526 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3527 return NULL_RTX;
ee92e7ba
MS
3528
3529 tree dest = CALL_EXPR_ARG (exp, 0);
3530 tree src = CALL_EXPR_ARG (exp, 1);
3531 tree len = CALL_EXPR_ARG (exp, 2);
3532
af3fa359
MS
3533 /* Policy does not generally allow using compute_objsize (which
3534 is used internally by check_memop_size) to change code generation
3535 or drive optimization decisions.
3536
3537 In this instance it is safe because the code we generate has
3538 the same semantics regardless of the return value of
3539 check_memop_sizes. Exactly the same amount of data is copied
3540 and the return value is exactly the same in both cases.
3541
3542 Furthermore, check_memop_size always uses mode 0 for the call to
3543 compute_objsize, so the imprecise nature of compute_objsize is
3544 avoided. */
3545
ee92e7ba
MS
3546 /* Avoid expanding mempcpy into memcpy when the call is determined
3547 to overflow the buffer. This also prevents the same overflow
3548 from being diagnosed again when expanding memcpy. */
cc8bea0a 3549 if (!check_memop_access (exp, dest, src, len))
ee92e7ba
MS
3550 return NULL_RTX;
3551
3552 return expand_builtin_mempcpy_args (dest, src, len,
671a00ee 3553 target, exp, /*endp=*/ 1);
edcf72f3
IE
3554}
3555
3556/* Expand an instrumented call EXP to the mempcpy builtin.
3557 Return NULL_RTX if we failed, the caller should emit a normal call,
3558 otherwise try to get the result in TARGET, if convenient (and in
3559 mode MODE if that's convenient). */
3560
3561static rtx
671a00ee 3562expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
edcf72f3
IE
3563{
3564 if (!validate_arglist (exp,
3565 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3566 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3567 INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3569 else
3570 {
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 2);
3573 tree len = CALL_EXPR_ARG (exp, 4);
3574 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
671a00ee 3575 exp, 1);
edcf72f3
IE
3576
3577 /* Return src bounds with the result. */
3578 if (res)
3579 {
30975f63 3580 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
3581 expand_normal (CALL_EXPR_ARG (exp, 1)));
3582 res = chkp_join_splitted_slot (res, bnd);
3583 }
3584 return res;
5039610b
SL
3585 }
3586}
3587
671a00ee
ML
3588/* Helper function to do the actual work for expand of memory copy family
3589 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3590 of memory from SRC to DEST and assign to TARGET if convenient.
3591 If ENDP is 0 return the
3592 destination pointer, if ENDP is 1 return the end pointer ala
3593 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3594 stpcpy. */
5039610b
SL
3595
3596static rtx
671a00ee
ML
3597expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3598 rtx target, tree exp, int endp)
5039610b 3599{
671a00ee
ML
3600 const char *src_str;
3601 unsigned int src_align = get_pointer_alignment (src);
3602 unsigned int dest_align = get_pointer_alignment (dest);
3603 rtx dest_mem, src_mem, dest_addr, len_rtx;
3604 HOST_WIDE_INT expected_size = -1;
3605 unsigned int expected_align = 0;
3606 unsigned HOST_WIDE_INT min_size;
3607 unsigned HOST_WIDE_INT max_size;
3608 unsigned HOST_WIDE_INT probable_max_size;
edcf72f3 3609
671a00ee
ML
3610 /* If DEST is not a pointer type, call the normal function. */
3611 if (dest_align == 0)
3612 return NULL_RTX;
c22cacf3 3613
671a00ee
ML
3614 /* If either SRC is not a pointer type, don't do this
3615 operation in-line. */
3616 if (src_align == 0)
3617 return NULL_RTX;
8fd3cf4e 3618
671a00ee
ML
3619 if (currently_expanding_gimple_stmt)
3620 stringop_block_profile (currently_expanding_gimple_stmt,
3621 &expected_align, &expected_size);
33521f7d 3622
671a00ee
ML
3623 if (expected_align < dest_align)
3624 expected_align = dest_align;
3625 dest_mem = get_memory_rtx (dest, len);
3626 set_mem_align (dest_mem, dest_align);
3627 len_rtx = expand_normal (len);
3628 determine_block_size (len, len_rtx, &min_size, &max_size,
3629 &probable_max_size);
3630 src_str = c_getstr (src);
e3e9f108 3631
671a00ee
ML
3632 /* If SRC is a string constant and block move would be done
3633 by pieces, we can avoid loading the string from memory
3634 and only stored the computed constants. */
3635 if (src_str
3636 && CONST_INT_P (len_rtx)
3637 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3638 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3639 CONST_CAST (char *, src_str),
3640 dest_align, false))
3641 {
3642 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3643 builtin_memcpy_read_str,
7d3eecca 3644 CONST_CAST (char *, src_str),
671a00ee
ML
3645 dest_align, false, endp);
3646 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3647 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3648 return dest_mem;
3649 }
e3e9f108 3650
671a00ee
ML
3651 src_mem = get_memory_rtx (src, len);
3652 set_mem_align (src_mem, src_align);
8fd3cf4e 3653
671a00ee 3654 /* Copy word part most expediently. */
fdd33254
ML
3655 enum block_op_methods method = BLOCK_OP_NORMAL;
3656 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3657 method = BLOCK_OP_TAILCALL;
3658 if (endp == 1 && target != const0_rtx)
3659 method = BLOCK_OP_NO_LIBCALL_RET;
3660 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
671a00ee
ML
3661 expected_align, expected_size,
3662 min_size, max_size, probable_max_size);
fdd33254
ML
3663 if (dest_addr == pc_rtx)
3664 return NULL_RTX;
671a00ee
ML
3665
3666 if (dest_addr == 0)
3667 {
3668 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3669 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3670 }
3671
3672 if (endp && target != const0_rtx)
3673 {
3674 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3675 /* stpcpy pointer to last byte. */
3676 if (endp == 2)
3677 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
7d3eecca 3678 }
671a00ee
ML
3679
3680 return dest_addr;
3681}
3682
3683static rtx
3684expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3685 rtx target, tree orig_exp, int endp)
3686{
3687 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3688 endp);
e3e9f108
JJ
3689}
3690
5039610b 3691/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
beed8fc0
AO
3692 we failed, the caller should emit a normal call, otherwise try to
3693 get the result in TARGET, if convenient. If ENDP is 0 return the
3694 destination pointer, if ENDP is 1 return the end pointer ala
3695 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3696 stpcpy. */
3697
3698static rtx
3699expand_movstr (tree dest, tree src, rtx target, int endp)
3700{
a5c7d693 3701 struct expand_operand ops[3];
beed8fc0
AO
3702 rtx dest_mem;
3703 rtx src_mem;
beed8fc0 3704
7cff0471 3705 if (!targetm.have_movstr ())
5039610b 3706 return NULL_RTX;
beed8fc0 3707
435bb2a1
JJ
3708 dest_mem = get_memory_rtx (dest, NULL);
3709 src_mem = get_memory_rtx (src, NULL);
beed8fc0
AO
3710 if (!endp)
3711 {
3712 target = force_reg (Pmode, XEXP (dest_mem, 0));
3713 dest_mem = replace_equiv_address (dest_mem, target);
beed8fc0
AO
3714 }
3715
a5c7d693
RS
3716 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3717 create_fixed_operand (&ops[1], dest_mem);
3718 create_fixed_operand (&ops[2], src_mem);
7cff0471 3719 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
7c5425fa 3720 return NULL_RTX;
beed8fc0 3721
a5c7d693 3722 if (endp && target != const0_rtx)
7ce3fc8f 3723 {
a5c7d693
RS
3724 target = ops[0].value;
3725 /* movstr is supposed to set end to the address of the NUL
3726 terminator. If the caller requested a mempcpy-like return value,
3727 adjust it. */
3728 if (endp == 1)
3729 {
0a81f074
RS
3730 rtx tem = plus_constant (GET_MODE (target),
3731 gen_lowpart (GET_MODE (target), target), 1);
a5c7d693
RS
3732 emit_move_insn (target, force_operand (tem, NULL_RTX));
3733 }
7ce3fc8f 3734 }
beed8fc0
AO
3735 return target;
3736}
3737
ee92e7ba
MS
3738/* Do some very basic size validation of a call to the strcpy builtin
3739 given by EXP. Return NULL_RTX to have the built-in expand to a call
3740 to the library function. */
3741
3742static rtx
3743expand_builtin_strcat (tree exp, rtx)
3744{
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3746 || !warn_stringop_overflow)
3747 return NULL_RTX;
3748
3749 tree dest = CALL_EXPR_ARG (exp, 0);
3750 tree src = CALL_EXPR_ARG (exp, 1);
3751
3752 /* There is no way here to determine the length of the string in
3753 the destination to which the SRC string is being appended so
3754 just diagnose cases when the souce string is longer than
3755 the destination object. */
3756
d9c5a8b9 3757 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba 3758
cc8bea0a
MS
3759 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3760 destsize);
ee92e7ba
MS
3761
3762 return NULL_RTX;
3763}
3764
b8698a0f
L
3765/* Expand expression EXP, which is a call to the strcpy builtin. Return
3766 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 3767 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 3768 convenient). */
fed3cef0 3769
28f4ec01 3770static rtx
44e10129 3771expand_builtin_strcpy (tree exp, rtx target)
28f4ec01 3772{
ee92e7ba
MS
3773 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3774 return NULL_RTX;
3775
3776 tree dest = CALL_EXPR_ARG (exp, 0);
3777 tree src = CALL_EXPR_ARG (exp, 1);
3778
3779 if (warn_stringop_overflow)
3780 {
d9c5a8b9 3781 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
cc8bea0a
MS
3782 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3783 src, destsize);
ee92e7ba
MS
3784 }
3785
3786 return expand_builtin_strcpy_args (dest, src, target);
5039610b
SL
3787}
3788
3789/* Helper function to do the actual work for expand_builtin_strcpy. The
3790 arguments to the builtin_strcpy call DEST and SRC are broken out
3791 so that this can also be called without constructing an actual CALL_EXPR.
3792 The other arguments and return value are the same as for
3793 expand_builtin_strcpy. */
3794
3795static rtx
44e10129 3796expand_builtin_strcpy_args (tree dest, tree src, rtx target)
5039610b 3797{
5039610b 3798 return expand_movstr (dest, src, target, /*endp=*/0);
28f4ec01
BS
3799}
3800
5039610b
SL
3801/* Expand a call EXP to the stpcpy builtin.
3802 Return NULL_RTX if we failed the caller should emit a normal call,
9cb65f92
KG
3803 otherwise try to get the result in TARGET, if convenient (and in
3804 mode MODE if that's convenient). */
3805
3806static rtx
ef4bddc2 3807expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
9cb65f92 3808{
5039610b 3809 tree dst, src;
db3927fb 3810 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
3811
3812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3813 return NULL_RTX;
3814
3815 dst = CALL_EXPR_ARG (exp, 0);
3816 src = CALL_EXPR_ARG (exp, 1);
3817
e50d56a5
MS
3818 if (warn_stringop_overflow)
3819 {
d9c5a8b9 3820 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
cc8bea0a
MS
3821 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3822 src, destsize);
e50d56a5
MS
3823 }
3824
beed8fc0 3825 /* If return value is ignored, transform stpcpy into strcpy. */
e79983f4 3826 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
ad4319ec 3827 {
e79983f4 3828 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
aa493694 3829 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
0d2a6e08 3830 return expand_expr (result, target, mode, EXPAND_NORMAL);
ad4319ec 3831 }
9cb65f92
KG
3832 else
3833 {
5039610b 3834 tree len, lenp1;
beed8fc0 3835 rtx ret;
e3e9f108 3836
8fd3cf4e 3837 /* Ensure we get an actual string whose length can be evaluated at
c22cacf3
MS
3838 compile-time, not an expression containing a string. This is
3839 because the latter will potentially produce pessimized code
3840 when used to produce the return value. */
ae808627 3841 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
5039610b 3842 return expand_movstr (dst, src, target, /*endp=*/2);
9cb65f92 3843
db3927fb 3844 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
44e10129 3845 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
671a00ee 3846 target, exp, /*endp=*/2);
beed8fc0
AO
3847
3848 if (ret)
3849 return ret;
3850
3851 if (TREE_CODE (len) == INTEGER_CST)
3852 {
84217346 3853 rtx len_rtx = expand_normal (len);
beed8fc0 3854
481683e1 3855 if (CONST_INT_P (len_rtx))
beed8fc0 3856 {
44e10129 3857 ret = expand_builtin_strcpy_args (dst, src, target);
beed8fc0
AO
3858
3859 if (ret)
3860 {
3861 if (! target)
58ec6ece
SE
3862 {
3863 if (mode != VOIDmode)
3864 target = gen_reg_rtx (mode);
3865 else
3866 target = gen_reg_rtx (GET_MODE (ret));
3867 }
beed8fc0
AO
3868 if (GET_MODE (target) != GET_MODE (ret))
3869 ret = gen_lowpart (GET_MODE (target), ret);
3870
0a81f074 3871 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
7ce3fc8f 3872 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
298e6adc 3873 gcc_assert (ret);
beed8fc0
AO
3874
3875 return target;
3876 }
3877 }
3878 }
3879
5039610b 3880 return expand_movstr (dst, src, target, /*endp=*/2);
9cb65f92
KG
3881 }
3882}
3883
e50d56a5
MS
3884/* Check a call EXP to the stpncpy built-in for validity.
3885 Return NULL_RTX on both success and failure. */
3886
3887static rtx
3888expand_builtin_stpncpy (tree exp, rtx)
3889{
3890 if (!validate_arglist (exp,
3891 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3892 || !warn_stringop_overflow)
3893 return NULL_RTX;
3894
c6c02519 3895 /* The source and destination of the call. */
e50d56a5
MS
3896 tree dest = CALL_EXPR_ARG (exp, 0);
3897 tree src = CALL_EXPR_ARG (exp, 1);
3898
c6c02519 3899 /* The exact number of bytes to write (not the maximum). */
e50d56a5 3900 tree len = CALL_EXPR_ARG (exp, 2);
e50d56a5 3901
c6c02519 3902 /* The size of the destination object. */
d9c5a8b9 3903 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e50d56a5 3904
cc8bea0a 3905 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
e50d56a5
MS
3906
3907 return NULL_RTX;
3908}
3909
57814e5e
JJ
3910/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3911 bytes from constant string DATA + OFFSET and return it as target
3912 constant. */
3913
14a43348 3914rtx
4682ae04 3915builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3916 scalar_int_mode mode)
57814e5e
JJ
3917{
3918 const char *str = (const char *) data;
3919
3920 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3921 return const0_rtx;
3922
3923 return c_readstr (str + offset, mode);
3924}
3925
ee92e7ba
MS
3926/* Helper to check the sizes of sequences and the destination of calls
3927 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3928 success (no overflow or invalid sizes), false otherwise. */
3929
3930static bool
3931check_strncat_sizes (tree exp, tree objsize)
3932{
3933 tree dest = CALL_EXPR_ARG (exp, 0);
3934 tree src = CALL_EXPR_ARG (exp, 1);
cc8bea0a 3935 tree maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
3936
3937 /* Try to determine the range of lengths that the source expression
3938 refers to. */
3939 tree lenrange[2];
3940 get_range_strlen (src, lenrange);
3941
3942 /* Try to verify that the destination is big enough for the shortest
3943 string. */
3944
3945 if (!objsize && warn_stringop_overflow)
3946 {
3947 /* If it hasn't been provided by __strncat_chk, try to determine
3948 the size of the destination object into which the source is
3949 being copied. */
d9c5a8b9 3950 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba
MS
3951 }
3952
3953 /* Add one for the terminating nul. */
3954 tree srclen = (lenrange[0]
3955 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3956 size_one_node)
3957 : NULL_TREE);
3958
cc8bea0a
MS
3959 /* The strncat function copies at most MAXREAD bytes and always appends
3960 the terminating nul so the specified upper bound should never be equal
3961 to (or greater than) the size of the destination. */
3962 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3963 && tree_int_cst_equal (objsize, maxread))
ee92e7ba 3964 {
e50d56a5
MS
3965 location_t loc = tree_nonartificial_location (exp);
3966 loc = expansion_point_location_if_in_system_header (loc);
3967
3968 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 3969 "%K%qD specified bound %E equals destination size",
cc8bea0a 3970 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
3971
3972 return false;
3973 }
3974
3975 if (!srclen
cc8bea0a 3976 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 3977 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
3978 && tree_int_cst_lt (maxread, srclen)))
3979 srclen = maxread;
ee92e7ba 3980
cc8bea0a 3981 /* The number of bytes to write is LEN but check_access will also
ee92e7ba 3982 check SRCLEN if LEN's value isn't known. */
cc8bea0a
MS
3983 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3984 objsize);
ee92e7ba
MS
3985}
3986
3987/* Similar to expand_builtin_strcat, do some very basic size validation
3988 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3989 the built-in expand to a call to the library function. */
3990
3991static rtx
3992expand_builtin_strncat (tree exp, rtx)
3993{
3994 if (!validate_arglist (exp,
3995 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3996 || !warn_stringop_overflow)
3997 return NULL_RTX;
3998
3999 tree dest = CALL_EXPR_ARG (exp, 0);
4000 tree src = CALL_EXPR_ARG (exp, 1);
4001 /* The upper bound on the number of bytes to write. */
cc8bea0a 4002 tree maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
4003 /* The length of the source sequence. */
4004 tree slen = c_strlen (src, 1);
4005
4006 /* Try to determine the range of lengths that the source expression
4007 refers to. */
4008 tree lenrange[2];
4009 if (slen)
4010 lenrange[0] = lenrange[1] = slen;
4011 else
4012 get_range_strlen (src, lenrange);
4013
4014 /* Try to verify that the destination is big enough for the shortest
4015 string. First try to determine the size of the destination object
4016 into which the source is being copied. */
d9c5a8b9 4017 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
ee92e7ba
MS
4018
4019 /* Add one for the terminating nul. */
4020 tree srclen = (lenrange[0]
4021 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4022 size_one_node)
4023 : NULL_TREE);
4024
cc8bea0a
MS
4025 /* The strncat function copies at most MAXREAD bytes and always appends
4026 the terminating nul so the specified upper bound should never be equal
4027 to (or greater than) the size of the destination. */
4028 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4029 && tree_int_cst_equal (destsize, maxread))
ee92e7ba 4030 {
e50d56a5
MS
4031 location_t loc = tree_nonartificial_location (exp);
4032 loc = expansion_point_location_if_in_system_header (loc);
4033
4034 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 4035 "%K%qD specified bound %E equals destination size",
cc8bea0a 4036 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
4037
4038 return NULL_RTX;
4039 }
4040
4041 if (!srclen
cc8bea0a 4042 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 4043 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
4044 && tree_int_cst_lt (maxread, srclen)))
4045 srclen = maxread;
ee92e7ba 4046
cc8bea0a
MS
4047 /* The number of bytes to write is SRCLEN. */
4048 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
ee92e7ba
MS
4049
4050 return NULL_RTX;
4051}
4052
b8698a0f 4053/* Expand expression EXP, which is a call to the strncpy builtin. Return
5039610b 4054 NULL_RTX if we failed the caller should emit a normal call. */
da9e9f08
KG
4055
4056static rtx
44e10129 4057expand_builtin_strncpy (tree exp, rtx target)
da9e9f08 4058{
db3927fb 4059 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
4060
4061 if (validate_arglist (exp,
4062 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
da9e9f08 4063 {
5039610b
SL
4064 tree dest = CALL_EXPR_ARG (exp, 0);
4065 tree src = CALL_EXPR_ARG (exp, 1);
ee92e7ba 4066 /* The number of bytes to write (not the maximum). */
5039610b 4067 tree len = CALL_EXPR_ARG (exp, 2);
ee92e7ba 4068 /* The length of the source sequence. */
5039610b 4069 tree slen = c_strlen (src, 1);
57814e5e 4070
cc8bea0a
MS
4071 if (warn_stringop_overflow)
4072 {
4073 tree destsize = compute_objsize (dest,
4074 warn_stringop_overflow - 1);
4075
4076 /* The number of bytes to write is LEN but check_access will also
4077 check SLEN if LEN's value isn't known. */
4078 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4079 destsize);
4080 }
ee92e7ba 4081
559837f7 4082 /* We must be passed a constant len and src parameter. */
cc269bb6 4083 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5039610b 4084 return NULL_RTX;
da9e9f08 4085
db3927fb 4086 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
da9e9f08
KG
4087
4088 /* We're required to pad with trailing zeros if the requested
c22cacf3 4089 len is greater than strlen(s2)+1. In that case try to
57814e5e 4090 use store_by_pieces, if it fails, punt. */
da9e9f08 4091 if (tree_int_cst_lt (slen, len))
57814e5e 4092 {
0eb77834 4093 unsigned int dest_align = get_pointer_alignment (dest);
5039610b 4094 const char *p = c_getstr (src);
57814e5e
JJ
4095 rtx dest_mem;
4096
cc269bb6 4097 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
ae7e9ddd 4098 || !can_store_by_pieces (tree_to_uhwi (len),
57814e5e 4099 builtin_strncpy_read_str,
f883e0a7
KG
4100 CONST_CAST (char *, p),
4101 dest_align, false))
5039610b 4102 return NULL_RTX;
57814e5e 4103
435bb2a1 4104 dest_mem = get_memory_rtx (dest, len);
ae7e9ddd 4105 store_by_pieces (dest_mem, tree_to_uhwi (len),
57814e5e 4106 builtin_strncpy_read_str,
f883e0a7 4107 CONST_CAST (char *, p), dest_align, false, 0);
44e10129 4108 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5ae6cd0d 4109 dest_mem = convert_memory_address (ptr_mode, dest_mem);
aa0f70e6 4110 return dest_mem;
57814e5e 4111 }
da9e9f08 4112 }
5039610b 4113 return NULL_RTX;
da9e9f08
KG
4114}
4115
ab937357
JJ
4116/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4117 bytes from constant string DATA + OFFSET and return it as target
4118 constant. */
4119
34d85166 4120rtx
4682ae04 4121builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4122 scalar_int_mode mode)
ab937357
JJ
4123{
4124 const char *c = (const char *) data;
f883e0a7 4125 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
4126
4127 memset (p, *c, GET_MODE_SIZE (mode));
4128
4129 return c_readstr (p, mode);
4130}
4131
1a887f86
RS
4132/* Callback routine for store_by_pieces. Return the RTL of a register
4133 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4134 char value given in the RTL register data. For example, if mode is
4135 4 bytes wide, return the RTL for 0x01010101*data. */
4136
4137static rtx
4682ae04 4138builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4139 scalar_int_mode mode)
1a887f86
RS
4140{
4141 rtx target, coeff;
4142 size_t size;
4143 char *p;
4144
4145 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
4146 if (size == 1)
4147 return (rtx) data;
1a887f86 4148
f883e0a7 4149 p = XALLOCAVEC (char, size);
1a887f86
RS
4150 memset (p, 1, size);
4151 coeff = c_readstr (p, mode);
4152
5ab2f7b7 4153 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
4154 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4155 return force_reg (mode, target);
4156}
4157
b8698a0f
L
4158/* Expand expression EXP, which is a call to the memset builtin. Return
4159 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 4160 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 4161 convenient). */
fed3cef0 4162
28f4ec01 4163static rtx
ef4bddc2 4164expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 4165{
5039610b
SL
4166 if (!validate_arglist (exp,
4167 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4168 return NULL_RTX;
ee92e7ba
MS
4169
4170 tree dest = CALL_EXPR_ARG (exp, 0);
4171 tree val = CALL_EXPR_ARG (exp, 1);
4172 tree len = CALL_EXPR_ARG (exp, 2);
4173
cc8bea0a 4174 check_memop_access (exp, dest, NULL_TREE, len);
ee92e7ba
MS
4175
4176 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5039610b 4177}
28f4ec01 4178
edcf72f3
IE
4179/* Expand expression EXP, which is an instrumented call to the memset builtin.
4180 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4181 try to get the result in TARGET, if convenient (and in mode MODE if that's
4182 convenient). */
4183
4184static rtx
4185expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4186{
4187 if (!validate_arglist (exp,
4188 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4189 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4190 return NULL_RTX;
4191 else
4192 {
4193 tree dest = CALL_EXPR_ARG (exp, 0);
4194 tree val = CALL_EXPR_ARG (exp, 2);
4195 tree len = CALL_EXPR_ARG (exp, 3);
4196 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4197
4198 /* Return src bounds with the result. */
4199 if (res)
4200 {
30975f63 4201 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
4202 expand_normal (CALL_EXPR_ARG (exp, 1)));
4203 res = chkp_join_splitted_slot (res, bnd);
4204 }
4205 return res;
4206 }
4207}
4208
5039610b
SL
4209/* Helper function to do the actual work for expand_builtin_memset. The
4210 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4211 so that this can also be called without constructing an actual CALL_EXPR.
4212 The other arguments and return value are the same as for
4213 expand_builtin_memset. */
880864cf 4214
5039610b
SL
4215static rtx
4216expand_builtin_memset_args (tree dest, tree val, tree len,
ef4bddc2 4217 rtx target, machine_mode mode, tree orig_exp)
5039610b
SL
4218{
4219 tree fndecl, fn;
4220 enum built_in_function fcode;
ef4bddc2 4221 machine_mode val_mode;
5039610b
SL
4222 char c;
4223 unsigned int dest_align;
4224 rtx dest_mem, dest_addr, len_rtx;
4225 HOST_WIDE_INT expected_size = -1;
4226 unsigned int expected_align = 0;
3918b108
JH
4227 unsigned HOST_WIDE_INT min_size;
4228 unsigned HOST_WIDE_INT max_size;
82bb7d4e 4229 unsigned HOST_WIDE_INT probable_max_size;
28f4ec01 4230
0eb77834 4231 dest_align = get_pointer_alignment (dest);
079a182e 4232
5039610b
SL
4233 /* If DEST is not a pointer type, don't do this operation in-line. */
4234 if (dest_align == 0)
4235 return NULL_RTX;
c2bd38e8 4236
a5883ba0
MM
4237 if (currently_expanding_gimple_stmt)
4238 stringop_block_profile (currently_expanding_gimple_stmt,
4239 &expected_align, &expected_size);
726a989a 4240
5039610b
SL
4241 if (expected_align < dest_align)
4242 expected_align = dest_align;
880864cf 4243
5039610b
SL
4244 /* If the LEN parameter is zero, return DEST. */
4245 if (integer_zerop (len))
4246 {
4247 /* Evaluate and ignore VAL in case it has side-effects. */
4248 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4249 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4250 }
57e84f18 4251
5039610b
SL
4252 /* Stabilize the arguments in case we fail. */
4253 dest = builtin_save_expr (dest);
4254 val = builtin_save_expr (val);
4255 len = builtin_save_expr (len);
1a887f86 4256
5039610b 4257 len_rtx = expand_normal (len);
82bb7d4e
JH
4258 determine_block_size (len, len_rtx, &min_size, &max_size,
4259 &probable_max_size);
5039610b 4260 dest_mem = get_memory_rtx (dest, len);
8a445129 4261 val_mode = TYPE_MODE (unsigned_char_type_node);
1a887f86 4262
5039610b
SL
4263 if (TREE_CODE (val) != INTEGER_CST)
4264 {
4265 rtx val_rtx;
1a887f86 4266
5039610b 4267 val_rtx = expand_normal (val);
8a445129 4268 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
28f4ec01 4269
5039610b
SL
4270 /* Assume that we can memset by pieces if we can store
4271 * the coefficients by pieces (in the required modes).
4272 * We can't pass builtin_memset_gen_str as that emits RTL. */
4273 c = 1;
cc269bb6 4274 if (tree_fits_uhwi_p (len)
ae7e9ddd 4275 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
4276 builtin_memset_read_str, &c, dest_align,
4277 true))
5039610b 4278 {
8a445129 4279 val_rtx = force_reg (val_mode, val_rtx);
ae7e9ddd 4280 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150
SL
4281 builtin_memset_gen_str, val_rtx, dest_align,
4282 true, 0);
5039610b
SL
4283 }
4284 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4285 dest_align, expected_align,
82bb7d4e
JH
4286 expected_size, min_size, max_size,
4287 probable_max_size))
880864cf 4288 goto do_libcall;
b8698a0f 4289
5039610b
SL
4290 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4291 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4292 return dest_mem;
4293 }
28f4ec01 4294
5039610b
SL
4295 if (target_char_cast (val, &c))
4296 goto do_libcall;
ab937357 4297
5039610b
SL
4298 if (c)
4299 {
cc269bb6 4300 if (tree_fits_uhwi_p (len)
ae7e9ddd 4301 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
4302 builtin_memset_read_str, &c, dest_align,
4303 true))
ae7e9ddd 4304 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150 4305 builtin_memset_read_str, &c, dest_align, true, 0);
8a445129
RS
4306 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4307 gen_int_mode (c, val_mode),
5039610b 4308 dest_align, expected_align,
82bb7d4e
JH
4309 expected_size, min_size, max_size,
4310 probable_max_size))
5039610b 4311 goto do_libcall;
b8698a0f 4312
5039610b
SL
4313 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4314 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4315 return dest_mem;
4316 }
ab937357 4317
5039610b
SL
4318 set_mem_align (dest_mem, dest_align);
4319 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4320 CALL_EXPR_TAILCALL (orig_exp)
4321 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3918b108 4322 expected_align, expected_size,
82bb7d4e
JH
4323 min_size, max_size,
4324 probable_max_size);
28f4ec01 4325
5039610b
SL
4326 if (dest_addr == 0)
4327 {
4328 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4329 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4330 }
28f4ec01 4331
5039610b 4332 return dest_addr;
880864cf 4333
5039610b
SL
4334 do_libcall:
4335 fndecl = get_callee_fndecl (orig_exp);
4336 fcode = DECL_FUNCTION_CODE (fndecl);
edcf72f3
IE
4337 if (fcode == BUILT_IN_MEMSET
4338 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
aa493694
JJ
4339 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4340 dest, val, len);
5039610b 4341 else if (fcode == BUILT_IN_BZERO)
aa493694
JJ
4342 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4343 dest, len);
5039610b
SL
4344 else
4345 gcc_unreachable ();
44e10129
MM
4346 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4347 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5039610b 4348 return expand_call (fn, target, target == const0_rtx);
28f4ec01
BS
4349}
4350
b8698a0f 4351/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 4352 NULL_RTX if we failed the caller should emit a normal call. */
5197bd50 4353
e3a709be 4354static rtx
8148fe65 4355expand_builtin_bzero (tree exp)
e3a709be 4356{
5039610b 4357 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 4358 return NULL_RTX;
e3a709be 4359
ee92e7ba
MS
4360 tree dest = CALL_EXPR_ARG (exp, 0);
4361 tree size = CALL_EXPR_ARG (exp, 1);
4362
cc8bea0a 4363 check_memop_access (exp, dest, NULL_TREE, size);
8d51ecf8 4364
3477addf 4365 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
4366 memset(ptr x, int 0, size_t y). This is done this way
4367 so that if it isn't expanded inline, we fallback to
4368 calling bzero instead of memset. */
8d51ecf8 4369
ee92e7ba
MS
4370 location_t loc = EXPR_LOCATION (exp);
4371
5039610b 4372 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
4373 fold_convert_loc (loc,
4374 size_type_node, size),
5039610b 4375 const0_rtx, VOIDmode, exp);
e3a709be
KG
4376}
4377
a666df60
RS
4378/* Try to expand cmpstr operation ICODE with the given operands.
4379 Return the result rtx on success, otherwise return null. */
4380
4381static rtx
4382expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4383 HOST_WIDE_INT align)
4384{
4385 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4386
4387 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4388 target = NULL_RTX;
4389
4390 struct expand_operand ops[4];
4391 create_output_operand (&ops[0], target, insn_mode);
4392 create_fixed_operand (&ops[1], arg1_rtx);
4393 create_fixed_operand (&ops[2], arg2_rtx);
4394 create_integer_operand (&ops[3], align);
4395 if (maybe_expand_insn (icode, 4, ops))
4396 return ops[0].value;
4397 return NULL_RTX;
4398}
4399
2be3b5ce 4400/* Expand expression EXP, which is a call to the memcmp built-in function.
9b0f6f5e 4401 Return NULL_RTX if we failed and the caller should emit a normal call,
36b85e43
BS
4402 otherwise try to get the result in TARGET, if convenient.
4403 RESULT_EQ is true if we can relax the returned value to be either zero
4404 or nonzero, without caring about the sign. */
5197bd50 4405
28f4ec01 4406static rtx
36b85e43 4407expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
28f4ec01 4408{
5039610b
SL
4409 if (!validate_arglist (exp,
4410 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4411 return NULL_RTX;
c2bd38e8 4412
7f9f48be
RS
4413 tree arg1 = CALL_EXPR_ARG (exp, 0);
4414 tree arg2 = CALL_EXPR_ARG (exp, 1);
4415 tree len = CALL_EXPR_ARG (exp, 2);
d9c5a8b9
MS
4416
4417 /* Diagnose calls where the specified length exceeds the size of either
4418 object. */
4419 if (warn_stringop_overflow)
4420 {
4421 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
4422 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4423 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
d9c5a8b9
MS
4424 {
4425 size = compute_objsize (arg2, 0);
cc8bea0a
MS
4426 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4427 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
4428 }
4429 }
4430
36b85e43
BS
4431 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4432 location_t loc = EXPR_LOCATION (exp);
358b8f01 4433
7f9f48be
RS
4434 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4435 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
28f4ec01 4436
7f9f48be
RS
4437 /* If we don't have POINTER_TYPE, call the function. */
4438 if (arg1_align == 0 || arg2_align == 0)
4439 return NULL_RTX;
28f4ec01 4440
7f9f48be
RS
4441 rtx arg1_rtx = get_memory_rtx (arg1, len);
4442 rtx arg2_rtx = get_memory_rtx (arg2, len);
36b85e43 4443 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
28f4ec01 4444
7f9f48be 4445 /* Set MEM_SIZE as appropriate. */
36b85e43 4446 if (CONST_INT_P (len_rtx))
7f9f48be 4447 {
36b85e43
BS
4448 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4449 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7f9f48be 4450 }
6cbaec9e 4451
36b85e43
BS
4452 by_pieces_constfn constfn = NULL;
4453
d0d7f887
BS
4454 const char *src_str = c_getstr (arg2);
4455 if (result_eq && src_str == NULL)
4456 {
4457 src_str = c_getstr (arg1);
4458 if (src_str != NULL)
4f353581 4459 std::swap (arg1_rtx, arg2_rtx);
d0d7f887 4460 }
36b85e43
BS
4461
4462 /* If SRC is a string constant and block move would be done
4463 by pieces, we can avoid loading the string from memory
4464 and only stored the computed constants. */
4465 if (src_str
4466 && CONST_INT_P (len_rtx)
4467 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4468 constfn = builtin_memcpy_read_str;
4469
4470 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4471 TREE_TYPE (len), target,
4472 result_eq, constfn,
4473 CONST_CAST (char *, src_str));
4474
7f9f48be
RS
4475 if (result)
4476 {
4477 /* Return the value in the proper mode for this function. */
4478 if (GET_MODE (result) == mode)
4479 return result;
6cbaec9e 4480
7f9f48be
RS
4481 if (target != 0)
4482 {
4483 convert_move (target, result, 0);
4484 return target;
4485 }
8878e913 4486
28f4ec01 4487 return convert_to_mode (mode, result, 0);
7f9f48be 4488 }
28f4ec01 4489
ee516de9 4490 return NULL_RTX;
c2bd38e8
RS
4491}
4492
5039610b 4493/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
28f4ec01
BS
4494 if we failed the caller should emit a normal call, otherwise try to get
4495 the result in TARGET, if convenient. */
fed3cef0 4496
28f4ec01 4497static rtx
44e10129 4498expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
28f4ec01 4499{
5039610b
SL
4500 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4501 return NULL_RTX;
8d51ecf8 4502
a666df60
RS
4503 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4504 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
4505 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4506 return NULL_RTX;
c22cacf3 4507
16155777
MS
4508 tree arg1 = CALL_EXPR_ARG (exp, 0);
4509 tree arg2 = CALL_EXPR_ARG (exp, 1);
40c1d5f8 4510
16155777
MS
4511 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4512 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
2be3b5ce 4513
16155777
MS
4514 /* If we don't have POINTER_TYPE, call the function. */
4515 if (arg1_align == 0 || arg2_align == 0)
4516 return NULL_RTX;
2be3b5ce 4517
16155777
MS
4518 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4519 arg1 = builtin_save_expr (arg1);
4520 arg2 = builtin_save_expr (arg2);
28f4ec01 4521
16155777
MS
4522 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4523 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
a666df60 4524
16155777
MS
4525 rtx result = NULL_RTX;
4526 /* Try to call cmpstrsi. */
4527 if (cmpstr_icode != CODE_FOR_nothing)
4528 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4529 MIN (arg1_align, arg2_align));
40c1d5f8 4530
16155777
MS
4531 /* Try to determine at least one length and call cmpstrnsi. */
4532 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4533 {
4534 tree len;
4535 rtx arg3_rtx;
4536
4537 tree len1 = c_strlen (arg1, 1);
4538 tree len2 = c_strlen (arg2, 1);
4539
4540 if (len1)
4541 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4542 if (len2)
4543 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4544
4545 /* If we don't have a constant length for the first, use the length
4546 of the second, if we know it. We don't require a constant for
4547 this case; some cost analysis could be done if both are available
4548 but neither is constant. For now, assume they're equally cheap,
4549 unless one has side effects. If both strings have constant lengths,
4550 use the smaller. */
4551
4552 if (!len1)
4553 len = len2;
4554 else if (!len2)
4555 len = len1;
4556 else if (TREE_SIDE_EFFECTS (len1))
4557 len = len2;
4558 else if (TREE_SIDE_EFFECTS (len2))
4559 len = len1;
4560 else if (TREE_CODE (len1) != INTEGER_CST)
4561 len = len2;
4562 else if (TREE_CODE (len2) != INTEGER_CST)
4563 len = len1;
4564 else if (tree_int_cst_lt (len1, len2))
4565 len = len1;
4566 else
4567 len = len2;
c43fa1f5 4568
16155777
MS
4569 /* If both arguments have side effects, we cannot optimize. */
4570 if (len && !TREE_SIDE_EFFECTS (len))
40c1d5f8 4571 {
16155777
MS
4572 arg3_rtx = expand_normal (len);
4573 result = expand_cmpstrn_or_cmpmem
4574 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4575 arg3_rtx, MIN (arg1_align, arg2_align));
40c1d5f8 4576 }
16155777
MS
4577 }
4578
4579 /* Check to see if the argument was declared attribute nonstring
4580 and if so, issue a warning since at this point it's not known
4581 to be nul-terminated. */
4582 tree fndecl = get_callee_fndecl (exp);
4583 maybe_warn_nonstring_arg (fndecl, exp);
fed3cef0 4584
16155777
MS
4585 if (result)
4586 {
4587 /* Return the value in the proper mode for this function. */
4588 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4589 if (GET_MODE (result) == mode)
4590 return result;
4591 if (target == 0)
4592 return convert_to_mode (mode, result, 0);
4593 convert_move (target, result, 0);
4594 return target;
40c1d5f8 4595 }
16155777
MS
4596
4597 /* Expand the library call ourselves using a stabilized argument
4598 list to avoid re-evaluating the function's arguments twice. */
4599 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4600 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4601 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4602 return expand_call (fn, target, target == const0_rtx);
2dee4af1 4603}
28f4ec01 4604
b8698a0f 4605/* Expand expression EXP, which is a call to the strncmp builtin. Return
5039610b 4606 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
da9e9f08 4607 the result in TARGET, if convenient. */
5197bd50 4608
da9e9f08 4609static rtx
44e10129 4610expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
ef4bddc2 4611 ATTRIBUTE_UNUSED machine_mode mode)
da9e9f08 4612{
5039610b
SL
4613 if (!validate_arglist (exp,
4614 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4615 return NULL_RTX;
da9e9f08 4616
819c1488 4617 /* If c_strlen can determine an expression for one of the string
40c1d5f8 4618 lengths, and it doesn't have side effects, then emit cmpstrnsi
2be3b5ce 4619 using length MIN(strlen(string)+1, arg3). */
a666df60 4620 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
16155777
MS
4621 if (cmpstrn_icode == CODE_FOR_nothing)
4622 return NULL_RTX;
5197bd50 4623
16155777
MS
4624 tree len;
4625
4626 tree arg1 = CALL_EXPR_ARG (exp, 0);
4627 tree arg2 = CALL_EXPR_ARG (exp, 1);
4628 tree arg3 = CALL_EXPR_ARG (exp, 2);
4629
4630 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4631 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4632
4633 tree len1 = c_strlen (arg1, 1);
4634 tree len2 = c_strlen (arg2, 1);
4635
4636 location_t loc = EXPR_LOCATION (exp);
4637
4638 if (len1)
4639 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4640 if (len2)
4641 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4642
4643 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4644
4645 /* If we don't have a constant length for the first, use the length
4646 of the second, if we know it. If neither string is constant length,
4647 use the given length argument. We don't require a constant for
4648 this case; some cost analysis could be done if both are available
4649 but neither is constant. For now, assume they're equally cheap,
4650 unless one has side effects. If both strings have constant lengths,
4651 use the smaller. */
4652
4653 if (!len1 && !len2)
4654 len = len3;
4655 else if (!len1)
4656 len = len2;
4657 else if (!len2)
4658 len = len1;
4659 else if (TREE_SIDE_EFFECTS (len1))
4660 len = len2;
4661 else if (TREE_SIDE_EFFECTS (len2))
4662 len = len1;
4663 else if (TREE_CODE (len1) != INTEGER_CST)
4664 len = len2;
4665 else if (TREE_CODE (len2) != INTEGER_CST)
4666 len = len1;
4667 else if (tree_int_cst_lt (len1, len2))
4668 len = len1;
4669 else
4670 len = len2;
4671
4672 /* If we are not using the given length, we must incorporate it here.
4673 The actual new length parameter will be MIN(len,arg3) in this case. */
4674 if (len != len3)
4675 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4676 rtx arg1_rtx = get_memory_rtx (arg1, len);
4677 rtx arg2_rtx = get_memory_rtx (arg2, len);
4678 rtx arg3_rtx = expand_normal (len);
4679 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4680 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4681 MIN (arg1_align, arg2_align));
4682
4683 /* Check to see if the argument was declared attribute nonstring
4684 and if so, issue a warning since at this point it's not known
4685 to be nul-terminated. */
4686 tree fndecl = get_callee_fndecl (exp);
4687 maybe_warn_nonstring_arg (fndecl, exp);
4688
4689 if (result)
4690 {
4691 /* Return the value in the proper mode for this function. */
4692 mode = TYPE_MODE (TREE_TYPE (exp));
4693 if (GET_MODE (result) == mode)
4694 return result;
4695 if (target == 0)
4696 return convert_to_mode (mode, result, 0);
4697 convert_move (target, result, 0);
4698 return target;
4699 }
4700
4701 /* Expand the library call ourselves using a stabilized argument
4702 list to avoid re-evaluating the function's arguments twice. */
4703 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4704 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4705 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4706 return expand_call (fn, target, target == const0_rtx);
d118937d
KG
4707}
4708
d3707adb
RH
4709/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4710 if that's convenient. */
fed3cef0 4711
d3707adb 4712rtx
4682ae04 4713expand_builtin_saveregs (void)
28f4ec01 4714{
58f4cf2a
DM
4715 rtx val;
4716 rtx_insn *seq;
28f4ec01
BS
4717
4718 /* Don't do __builtin_saveregs more than once in a function.
4719 Save the result of the first call and reuse it. */
4720 if (saveregs_value != 0)
4721 return saveregs_value;
28f4ec01 4722
d3707adb
RH
4723 /* When this function is called, it means that registers must be
4724 saved on entry to this function. So we migrate the call to the
4725 first insn of this function. */
4726
4727 start_sequence ();
28f4ec01 4728
d3707adb 4729 /* Do whatever the machine needs done in this case. */
61f71b34 4730 val = targetm.calls.expand_builtin_saveregs ();
28f4ec01 4731
d3707adb
RH
4732 seq = get_insns ();
4733 end_sequence ();
28f4ec01 4734
d3707adb 4735 saveregs_value = val;
28f4ec01 4736
2f937369
DM
4737 /* Put the insns after the NOTE that starts the function. If this
4738 is inside a start_sequence, make the outer-level insn chain current, so
d3707adb
RH
4739 the code is placed at the start of the function. */
4740 push_topmost_sequence ();
242229bb 4741 emit_insn_after (seq, entry_of_function ());
d3707adb
RH
4742 pop_topmost_sequence ();
4743
4744 return val;
28f4ec01
BS
4745}
4746
8870e212 4747/* Expand a call to __builtin_next_arg. */
5197bd50 4748
28f4ec01 4749static rtx
8870e212 4750expand_builtin_next_arg (void)
28f4ec01 4751{
8870e212
JJ
4752 /* Checking arguments is already done in fold_builtin_next_arg
4753 that must be called before this function. */
4319e38c 4754 return expand_binop (ptr_mode, add_optab,
38173d38
JH
4755 crtl->args.internal_arg_pointer,
4756 crtl->args.arg_offset_rtx,
28f4ec01
BS
4757 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4758}
4759
d3707adb
RH
4760/* Make it easier for the backends by protecting the valist argument
4761 from multiple evaluations. */
4762
4763static tree
db3927fb 4764stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
d3707adb 4765{
35cbb299
KT
4766 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4767
70f34814
RG
4768 /* The current way of determining the type of valist is completely
4769 bogus. We should have the information on the va builtin instead. */
4770 if (!vatype)
4771 vatype = targetm.fn_abi_va_list (cfun->decl);
35cbb299
KT
4772
4773 if (TREE_CODE (vatype) == ARRAY_TYPE)
d3707adb 4774 {
9f720c3e
GK
4775 if (TREE_SIDE_EFFECTS (valist))
4776 valist = save_expr (valist);
8ebecc3b 4777
9f720c3e 4778 /* For this case, the backends will be expecting a pointer to
35cbb299
KT
4779 vatype, but it's possible we've actually been given an array
4780 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
9f720c3e
GK
4781 So fix it. */
4782 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
daf68dd7 4783 {
35cbb299 4784 tree p1 = build_pointer_type (TREE_TYPE (vatype));
db3927fb 4785 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
daf68dd7 4786 }
d3707adb 4787 }
8ebecc3b 4788 else
d3707adb 4789 {
70f34814 4790 tree pt = build_pointer_type (vatype);
8ebecc3b 4791
9f720c3e
GK
4792 if (! needs_lvalue)
4793 {
8ebecc3b
RH
4794 if (! TREE_SIDE_EFFECTS (valist))
4795 return valist;
8d51ecf8 4796
db3927fb 4797 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
d3707adb 4798 TREE_SIDE_EFFECTS (valist) = 1;
d3707adb 4799 }
9f720c3e 4800
8ebecc3b 4801 if (TREE_SIDE_EFFECTS (valist))
9f720c3e 4802 valist = save_expr (valist);
70f34814
RG
4803 valist = fold_build2_loc (loc, MEM_REF,
4804 vatype, valist, build_int_cst (pt, 0));
d3707adb
RH
4805 }
4806
4807 return valist;
4808}
4809
c35d187f
RH
4810/* The "standard" definition of va_list is void*. */
4811
4812tree
4813std_build_builtin_va_list (void)
4814{
4815 return ptr_type_node;
4816}
4817
35cbb299
KT
4818/* The "standard" abi va_list is va_list_type_node. */
4819
4820tree
4821std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4822{
4823 return va_list_type_node;
4824}
4825
4826/* The "standard" type of va_list is va_list_type_node. */
4827
4828tree
4829std_canonical_va_list_type (tree type)
4830{
4831 tree wtype, htype;
4832
35cbb299
KT
4833 wtype = va_list_type_node;
4834 htype = type;
431e31a9
TV
4835
4836 if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
4837 {
4838 /* If va_list is an array type, the argument may have decayed
4839 to a pointer type, e.g. by being passed to another function.
4840 In that case, unwrap both types so that we can compare the
4841 underlying records. */
4842 if (TREE_CODE (htype) == ARRAY_TYPE
4843 || POINTER_TYPE_P (htype))
4844 {
4845 wtype = TREE_TYPE (wtype);
4846 htype = TREE_TYPE (htype);
4847 }
4848 }
4849 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4850 return va_list_type_node;
4851
4852 return NULL_TREE;
4853}
4854
d3707adb
RH
4855/* The "standard" implementation of va_start: just assign `nextarg' to
4856 the variable. */
5197bd50 4857
d3707adb 4858void
4682ae04 4859std_expand_builtin_va_start (tree valist, rtx nextarg)
d3707adb 4860{
508dabda
ILT
4861 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862 convert_move (va_r, nextarg, 0);
d5e254e1
IE
4863
4864 /* We do not have any valid bounds for the pointer, so
4865 just store zero bounds for it. */
4866 if (chkp_function_instrumented_p (current_function_decl))
4867 chkp_expand_bounds_reset_for_mem (valist,
4868 make_tree (TREE_TYPE (valist),
4869 nextarg));
d3707adb
RH
4870}
4871
5039610b 4872/* Expand EXP, a call to __builtin_va_start. */
5197bd50 4873
d3707adb 4874static rtx
5039610b 4875expand_builtin_va_start (tree exp)
d3707adb
RH
4876{
4877 rtx nextarg;
5039610b 4878 tree valist;
db3927fb 4879 location_t loc = EXPR_LOCATION (exp);
d3707adb 4880
5039610b 4881 if (call_expr_nargs (exp) < 2)
c69c9b36 4882 {
db3927fb 4883 error_at (loc, "too few arguments to function %<va_start%>");
c69c9b36
JM
4884 return const0_rtx;
4885 }
d3707adb 4886
5039610b 4887 if (fold_builtin_next_arg (exp, true))
8870e212 4888 return const0_rtx;
d3147f64 4889
8870e212 4890 nextarg = expand_builtin_next_arg ();
db3927fb 4891 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
d3707adb 4892
d7bd8aeb
JJ
4893 if (targetm.expand_builtin_va_start)
4894 targetm.expand_builtin_va_start (valist, nextarg);
4895 else
4896 std_expand_builtin_va_start (valist, nextarg);
d3707adb
RH
4897
4898 return const0_rtx;
4899}
4900
5039610b 4901/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 4902
d3707adb 4903static rtx
5039610b 4904expand_builtin_va_end (tree exp)
d3707adb 4905{
5039610b 4906 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 4907
daf68dd7
RH
4908 /* Evaluate for side effects, if needed. I hate macros that don't
4909 do that. */
4910 if (TREE_SIDE_EFFECTS (valist))
4911 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
4912
4913 return const0_rtx;
4914}
4915
5039610b 4916/* Expand EXP, a call to __builtin_va_copy. We do this as a
d3707adb
RH
4917 builtin rather than just as an assignment in stdarg.h because of the
4918 nastiness of array-type va_list types. */
3bdf5ad1 4919
d3707adb 4920static rtx
5039610b 4921expand_builtin_va_copy (tree exp)
d3707adb
RH
4922{
4923 tree dst, src, t;
db3927fb 4924 location_t loc = EXPR_LOCATION (exp);
d3707adb 4925
5039610b
SL
4926 dst = CALL_EXPR_ARG (exp, 0);
4927 src = CALL_EXPR_ARG (exp, 1);
d3707adb 4928
db3927fb
AH
4929 dst = stabilize_va_list_loc (loc, dst, 1);
4930 src = stabilize_va_list_loc (loc, src, 0);
d3707adb 4931
35cbb299
KT
4932 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4933
4934 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
d3707adb 4935 {
35cbb299 4936 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
d3707adb
RH
4937 TREE_SIDE_EFFECTS (t) = 1;
4938 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4939 }
4940 else
4941 {
8ebecc3b
RH
4942 rtx dstb, srcb, size;
4943
4944 /* Evaluate to pointers. */
4945 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4946 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
35cbb299
KT
4947 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4948 NULL_RTX, VOIDmode, EXPAND_NORMAL);
8ebecc3b 4949
5ae6cd0d
MM
4950 dstb = convert_memory_address (Pmode, dstb);
4951 srcb = convert_memory_address (Pmode, srcb);
ce2d32cd 4952
8ebecc3b
RH
4953 /* "Dereference" to BLKmode memories. */
4954 dstb = gen_rtx_MEM (BLKmode, dstb);
ba4828e0 4955 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
35cbb299 4956 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b 4957 srcb = gen_rtx_MEM (BLKmode, srcb);
ba4828e0 4958 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
35cbb299 4959 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b
RH
4960
4961 /* Copy. */
44bb111a 4962 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
d3707adb
RH
4963 }
4964
4965 return const0_rtx;
4966}
4967
28f4ec01
BS
4968/* Expand a call to one of the builtin functions __builtin_frame_address or
4969 __builtin_return_address. */
5197bd50 4970
28f4ec01 4971static rtx
5039610b 4972expand_builtin_frame_address (tree fndecl, tree exp)
28f4ec01 4973{
28f4ec01
BS
4974 /* The argument must be a nonnegative integer constant.
4975 It counts the number of frames to scan up the stack.
8423e57c
MS
4976 The value is either the frame pointer value or the return
4977 address saved in that frame. */
5039610b 4978 if (call_expr_nargs (exp) == 0)
28f4ec01
BS
4979 /* Warning about missing arg was already issued. */
4980 return const0_rtx;
cc269bb6 4981 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
28f4ec01 4982 {
8423e57c 4983 error ("invalid argument to %qD", fndecl);
28f4ec01
BS
4984 return const0_rtx;
4985 }
4986 else
4987 {
8423e57c
MS
4988 /* Number of frames to scan up the stack. */
4989 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4990
4991 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
28f4ec01
BS
4992
4993 /* Some ports cannot access arbitrary stack frames. */
4994 if (tem == NULL)
4995 {
8423e57c 4996 warning (0, "unsupported argument to %qD", fndecl);
28f4ec01
BS
4997 return const0_rtx;
4998 }
4999
8423e57c
MS
5000 if (count)
5001 {
5002 /* Warn since no effort is made to ensure that any frame
5003 beyond the current one exists or can be safely reached. */
5004 warning (OPT_Wframe_address, "calling %qD with "
5005 "a nonzero argument is unsafe", fndecl);
5006 }
5007
28f4ec01
BS
5008 /* For __builtin_frame_address, return what we've got. */
5009 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5010 return tem;
5011
f8cfc6aa 5012 if (!REG_P (tem)
28f4ec01 5013 && ! CONSTANT_P (tem))
18ae1560 5014 tem = copy_addr_to_reg (tem);
28f4ec01
BS
5015 return tem;
5016 }
5017}
5018
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  /* The three variants take 1 (alloca), 2 (alloca_with_align) or
     3 (alloca_with_align_and_max) integer arguments.  */
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var && !warn_vla_limit)
      || (!alloca_for_var && !warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings override
	 the more general -Walloc-size-larger-than so unless either of
	 the former options is specified check the alloca arguments for
	 overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  -1 means "no limit".  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
5076
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute STACK_DYNAMIC_OFFSET in Pmode, convert it to ptr_mode and
     fold it into the second (bottom) argument.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  /* Emit the actual library call with both pointers in ptr_mode.  */
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5100
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Only reuse SUBTARGET when it already has the mode we operate in.  */
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
5130
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  Only reuse SUBTARGET when its mode matches
     the argument's mode.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
994a57cd 5159
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  /* The expansion is simply the first argument; the hint (second
     argument) is ignored at this point.  */
  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
5f2d6cfa 5179
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  /* The alignment (and optional misalignment) arguments must be free of
     side effects here; any side effects were evaluated earlier.  */
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
5196
/* Expand a call to __builtin_trap: emit the target's trap instruction
   if it has one, otherwise a call to abort, followed by a barrier since
   control does not continue past the trap.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      /* No trap insn available: fall back to calling abort ().  */
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
075ec276 5218
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* A barrier is all that is needed; no insns are emitted.  */
  emit_barrier ();
}
5229
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Save the argument so it is evaluated only once; safe_from_p below
     inspects the saved tree against TARGET.  */
  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5252
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed in
   TARGET.  SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* First operand: the magnitude.  */
  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* Second operand: supplies only the sign.  */
  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
5275
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      /* Expand both bounds of the region to flush.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  /* If the insn could not be emitted, the builtin is a no-op.  */
  return const0_rtx;
}
5325
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary:
     tramp = (tramp + align - 1) & -align.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5350
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  The arguments are the
   trampoline memory, the address of the nested function, and the static
   chain value.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5408
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and let the target apply any final
   adjustment needed to form a callable address.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
5424
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor: the static chain at
     offset 0, then the code entry one pointer further on.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
5457
5458/* Expand a call to the builtin descriptor adjustment routine. */
5459
5460static rtx
5461expand_builtin_adjust_descriptor (tree exp)
5462{
5463 rtx tramp;
5464
5465 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5466 return NULL_RTX;
5467
5468 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5469
5470 /* Unalign the descriptor to allow runtime identification. */
5471 tramp = plus_constant (ptr_mode, tramp,
5472 targetm.calls.custom_function_descriptors);
5473
5474 return force_operand (tramp, NULL_RTX);
5475}
5476
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn did not apply; drop anything it emitted and fall back
	 to the manual bit extraction below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as the integer mode of the
	 same size.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  /* Map the builtin to the matching __gcov_* wrapper name.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern declaration for the wrapper with the same type as
     the wrapped function.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  /* Rewrite the original call to target the wrapper and expand it.  */
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
b8698a0f 5648
48ae6c13
RH
5649
5650\f
02ee605c
RH
5651/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5652 the pointer in these functions is void*, the tree optimizers may remove
5653 casts. The mode computed in expand_builtin isn't reliable either, due
5654 to __sync_bool_compare_and_swap.
5655
5656 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5657 group of builtins. This gives us log2 of the mode size. */
5658
ef4bddc2 5659static inline machine_mode
02ee605c
RH
5660get_builtin_sync_mode (int fcode_diff)
5661{
2de0aa52
HPN
5662 /* The size is not negotiable, so ask not to get BLKmode in return
5663 if the target indicates that a smaller size would be better. */
f4b31647 5664 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
02ee605c
RH
5665}
5666
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  /* Mark the memory as a barrier and volatile so it is never removed
     or reordered by the optimizers.  */
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5691
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  /* CONST_INTs have VOIDmode; recover the mode from the tree type.  */
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
5712
5713
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Warn only once per direction (fetch_and_nand vs nand_and_fetch)
	 about the GCC 4.4 NAND semantic change.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
5776
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* Request only the result actually wanted: the success flag for the
     boolean form, the old value otherwise, neither if it is unused.  */
  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
5809
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
5828
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* A lock release is an atomic store of 0 with release semantics.  */
  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
5841
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Let the target validate any target-specific bits; otherwise reject
     anything outside MEMMODEL_MASK ourselves.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memodel model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
5885
/* Expand the __atomic_exchange intrinsic:
   	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  /* Validate the memory model argument first so diagnostics are issued
     even when we fall back to a library call.  */
  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
5908
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering may not be stronger than the success ordering;
     degrade to SEQ_CST rather than generating invalid code.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  /* Release semantics make no sense for the load performed on failure.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; dereference it to a MEM in the access mode.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
5990
849a76a5
JJ
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* The library function takes 5 arguments: object, expected-pointer,
     desired, success model, failure model.  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Materialize the expected value in a stack slot so we can pass its
     address, as the library interface requires.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The IFN returns a complex pair: (success bool, old value).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6040
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in the low byte and the weak flag
     in bit 8 (see the '& 256' extraction below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Same model validation as expand_builtin_atomic_compare_exchange.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  /* Fall back to the library call if no inline sequence exists.  */
  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* The IFN result is a complex pair: (success bool, old value).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6110
86951993
AM
/* Expand the __atomic_load intrinsic:
   	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  /* Release semantics make no sense for a pure load; warn and degrade
     to the strongest model.  */
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
6140
6141
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* Only relaxed, release and seq_cst are valid for a store; anything
     acquire-flavored is rejected.  */
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
6173
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect
     the callee in the shared CALL_EXPR tree; restored below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: recompute the after-value as ~(fetched & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6248
adedd5c1
JJ
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional 4th argument carries an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the rtx op and target optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: a plain atomic fetch-op with the mask suffices.  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the direct bit-test-and pattern first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: do the fetch-op and extract the tested bit manually.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* FLAG set: reduce the result to the 0/1 value of the bit.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6332
d660c35e
AM
6333/* Expand an atomic clear operation.
6334 void _atomic_clear (BOOL *obj, enum memmodel)
6335 EXP is the call expression. */
6336
6337static rtx
6338expand_builtin_atomic_clear (tree exp)
6339{
ef4bddc2 6340 machine_mode mode;
d660c35e
AM
6341 rtx mem, ret;
6342 enum memmodel model;
6343
f4b31647 6344 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
6345 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6346 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6347
46b35980 6348 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 6349 {
8d9fdb49
MP
6350 source_location loc
6351 = expansion_point_location_if_in_system_header (input_location);
6352 warning_at (loc, OPT_Winvalid_memory_model,
6353 "invalid memory model for %<__atomic_store%>");
77df5327 6354 model = MEMMODEL_SEQ_CST;
d660c35e
AM
6355 }
6356
6357 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6358 Failing that, a store is issued by __atomic_store. The only way this can
6359 fail is if the bool type is larger than a word size. Unlikely, but
6360 handle it anyway for completeness. Assume a single threaded model since
6361 there is no atomic support in this case, and no barriers are required. */
6362 ret = expand_atomic_store (mem, const0_rtx, model, true);
6363 if (!ret)
6364 emit_move_insn (mem, const0_rtx);
6365 return const0_rtx;
6366}
6367
6368/* Expand an atomic test_and_set operation.
6369 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6370 EXP is the call expression. */
6371
6372static rtx
744accb2 6373expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 6374{
744accb2 6375 rtx mem;
d660c35e 6376 enum memmodel model;
ef4bddc2 6377 machine_mode mode;
d660c35e 6378
f4b31647 6379 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
6380 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6381 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6382
744accb2 6383 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
6384}
6385
6386
86951993
AM
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* A non-constant size can never fold to "always".  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6457
6458/* Return true if the parameters to call EXP represent an object which will
6459 always generate lock free instructions. The first argument represents the
6460 size of the object, and the second parameter is a pointer to the object
6461 itself. If NULL is passed for the object, then the result is based on
6462 typical alignment for an object of the specified size. Otherwise return
6463 false. */
6464
6465static rtx
6466expand_builtin_atomic_always_lock_free (tree exp)
6467{
6468 tree size;
6469 tree arg0 = CALL_EXPR_ARG (exp, 0);
6470 tree arg1 = CALL_EXPR_ARG (exp, 1);
6471
6472 if (TREE_CODE (arg0) != INTEGER_CST)
6473 {
6474 error ("non-constant argument 1 to __atomic_always_lock_free");
6475 return const0_rtx;
6476 }
6477
6478 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 6479 if (size == boolean_true_node)
86951993
AM
6480 return const1_rtx;
6481 return const0_rtx;
6482}
6483
6484/* Return a one or zero if it can be determined that object ARG1 of size ARG
6485 is lock free on this architecture. */
6486
6487static tree
6488fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6489{
6490 if (!flag_inline_atomics)
6491 return NULL_TREE;
6492
6493 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
6494 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6495 return boolean_true_node;
86951993
AM
6496
6497 return NULL_TREE;
6498}
6499
6500/* Return true if the parameters to call EXP represent an object which will
6501 always generate lock free instructions. The first argument represents the
6502 size of the object, and the second parameter is a pointer to the object
6503 itself. If NULL is passed for the object, then the result is based on
6504 typical alignment for an object of the specified size. Otherwise return
6505 NULL*/
6506
6507static rtx
6508expand_builtin_atomic_is_lock_free (tree exp)
6509{
6510 tree size;
6511 tree arg0 = CALL_EXPR_ARG (exp, 0);
6512 tree arg1 = CALL_EXPR_ARG (exp, 1);
6513
6514 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6515 {
6516 error ("non-integer argument 1 to __atomic_is_lock_free");
6517 return NULL_RTX;
6518 }
6519
6520 if (!flag_inline_atomics)
6521 return NULL_RTX;
6522
6523 /* If the value is known at compile time, return the RTX for it. */
6524 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 6525 if (size == boolean_true_node)
86951993
AM
6526 return const1_rtx;
6527
6528 return NULL_RTX;
6529}
6530
86951993
AM
6531/* Expand the __atomic_thread_fence intrinsic:
6532 void __atomic_thread_fence (enum memmodel)
6533 EXP is the CALL_EXPR. */
6534
6535static void
6536expand_builtin_atomic_thread_fence (tree exp)
6537{
c39169c8
RH
6538 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6539 expand_mem_thread_fence (model);
86951993
AM
6540}
6541
6542/* Expand the __atomic_signal_fence intrinsic:
6543 void __atomic_signal_fence (enum memmodel)
6544 EXP is the CALL_EXPR. */
6545
6546static void
6547expand_builtin_atomic_signal_fence (tree exp)
6548{
c39169c8
RH
6549 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6550 expand_mem_signal_fence (model);
48ae6c13
RH
6551}
6552
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  /* __sync primitives use the strongest (SYNC_SEQ_CST) barrier.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6560
f959607b
CLT
/* Expand a call to __builtin_thread_pointer.  Return the thread pointer
   in TARGET if possible, otherwise in a fresh Pmode register.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
6583
6584static void
6585expand_builtin_set_thread_pointer (tree exp)
6586{
6587 enum insn_code icode;
6588 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6589 return;
6590 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6591 if (icode != CODE_FOR_nothing)
6592 {
6593 struct expand_operand op;
6594 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6595 Pmode, EXPAND_NORMAL);
5440a1b0 6596 create_input_operand (&op, val, Pmode);
f959607b
CLT
6597 expand_insn (icode, 1, &op);
6598 return;
6599 }
6600 error ("__builtin_set_thread_pointer is not supported on this target");
6601}
6602
28f4ec01 6603\f
862d0b35
DN
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  /* Remember where we were so the args-size notes of everything emitted
     by the restore can be fixed up afterwards.  */
  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}
6621
862d0b35
DN
/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  /* emit_stack_save fills in RET with the saved stack pointer.  */
  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
6632
1f62d637
TV
/* Emit code to get the openacc gang, worker or vector id or size.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  /* Select the diagnostic name, the value used when the target has no
     oacc_dim patterns, and the generator for the dimension query.  */
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  /* The dimension must be one of the known OpenACC parallelism levels.  */
  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  /* Without target support, id is always 0 and size is always 1.  */
  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The dim patterns want a register output; copy through one if the
     target rtx is a MEM.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
41dbbb37 6698
28f4ec01
BS
6699/* Expand an expression EXP that calls a built-in function,
6700 with result going to TARGET if that's convenient
6701 (and in mode MODE if that's convenient).
6702 SUBTARGET may be used as the target for computing one of EXP's operands.
6703 IGNORE is nonzero if the value is to be ignored. */
6704
6705rtx
ef4bddc2 6706expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 6707 int ignore)
28f4ec01 6708{
2f503025 6709 tree fndecl = get_callee_fndecl (exp);
28f4ec01 6710 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
ef4bddc2 6711 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 6712 int flags;
28f4ec01 6713
d51151b2
JJ
6714 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6715 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6716
bdea98ca
MO
6717 /* When ASan is enabled, we don't want to expand some memory/string
6718 builtins and rely on libsanitizer's hooks. This allows us to avoid
6719 redundant checks and be sure, that possible overflow will be detected
6720 by ASan. */
6721
6722 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6723 return expand_call (exp, target, ignore);
6724
28f4ec01
BS
6725 /* When not optimizing, generate calls to library functions for a certain
6726 set of builtins. */
d25225de 6727 if (!optimize
48ae6c13 6728 && !called_as_built_in (fndecl)
63bf9a90
JH
6729 && fcode != BUILT_IN_FORK
6730 && fcode != BUILT_IN_EXECL
6731 && fcode != BUILT_IN_EXECV
6732 && fcode != BUILT_IN_EXECLP
6733 && fcode != BUILT_IN_EXECLE
6734 && fcode != BUILT_IN_EXECVP
6735 && fcode != BUILT_IN_EXECVE
9e878cf1 6736 && !ALLOCA_FUNCTION_CODE_P (fcode)
d5e254e1
IE
6737 && fcode != BUILT_IN_FREE
6738 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6739 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6740 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6741 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6742 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6743 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6744 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6745 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6746 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6747 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6748 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6749 && fcode != BUILT_IN_CHKP_BNDRET)
d25225de 6750 return expand_call (exp, target, ignore);
28f4ec01 6751
0a45ec5c
RS
6752 /* The built-in function expanders test for target == const0_rtx
6753 to determine whether the function's result will be ignored. */
6754 if (ignore)
6755 target = const0_rtx;
6756
6757 /* If the result of a pure or const built-in function is ignored, and
6758 none of its arguments are volatile, we can avoid expanding the
6759 built-in call and just evaluate the arguments for side-effects. */
6760 if (target == const0_rtx
9e3920e9
JJ
6761 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6762 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
6763 {
6764 bool volatilep = false;
6765 tree arg;
5039610b 6766 call_expr_arg_iterator iter;
0a45ec5c 6767
5039610b
SL
6768 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6769 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
6770 {
6771 volatilep = true;
6772 break;
6773 }
6774
6775 if (! volatilep)
6776 {
5039610b
SL
6777 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6778 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
6779 return const0_rtx;
6780 }
6781 }
6782
edcf72f3
IE
6783 /* expand_builtin_with_bounds is supposed to be used for
6784 instrumented builtin calls. */
d5e254e1
IE
6785 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6786
28f4ec01
BS
6787 switch (fcode)
6788 {
ea6a6627 6789 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 6790 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
6791 case BUILT_IN_FABSD32:
6792 case BUILT_IN_FABSD64:
6793 case BUILT_IN_FABSD128:
5039610b 6794 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 6795 if (target)
c22cacf3 6796 return target;
075ec276
RS
6797 break;
6798
ea6a6627 6799 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 6800 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 6801 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
6802 if (target)
6803 return target;
6804 break;
6805
5906d013
EC
6806 /* Just do a normal library call if we were unable to fold
6807 the values. */
ea6a6627 6808 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 6809 break;
28f4ec01 6810
1b1562a5 6811 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 6812 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
6813 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6814 if (target)
6815 return target;
6816 break;
6817
eaee4464
UB
6818 CASE_FLT_FN (BUILT_IN_ILOGB):
6819 if (! flag_unsafe_math_optimizations)
6820 break;
903c723b
TC
6821 gcc_fallthrough ();
6822 CASE_FLT_FN (BUILT_IN_ISINF):
6823 CASE_FLT_FN (BUILT_IN_FINITE):
6824 case BUILT_IN_ISFINITE:
6825 case BUILT_IN_ISNORMAL:
4359dc2a 6826 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
6827 if (target)
6828 return target;
6829 break;
6830
6c32ee74 6831 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
6832 CASE_FLT_FN (BUILT_IN_LCEIL):
6833 CASE_FLT_FN (BUILT_IN_LLCEIL):
6834 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 6835 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 6836 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 6837 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
6838 if (target)
6839 return target;
6840 break;
6841
6c32ee74 6842 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
6843 CASE_FLT_FN (BUILT_IN_LRINT):
6844 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 6845 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
6846 CASE_FLT_FN (BUILT_IN_LROUND):
6847 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 6848 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
6849 if (target)
6850 return target;
6851 break;
6852
ea6a6627 6853 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 6854 target = expand_builtin_powi (exp, target);
17684d46
RG
6855 if (target)
6856 return target;
6857 break;
6858
75c7c595 6859 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 6860 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
6861 gcc_assert (target);
6862 return target;
6863
ea6a6627
VR
6864 CASE_FLT_FN (BUILT_IN_SIN):
6865 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
6866 if (! flag_unsafe_math_optimizations)
6867 break;
6868 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6869 if (target)
6870 return target;
6871 break;
6872
403e54f0
RG
6873 CASE_FLT_FN (BUILT_IN_SINCOS):
6874 if (! flag_unsafe_math_optimizations)
6875 break;
6876 target = expand_builtin_sincos (exp);
6877 if (target)
6878 return target;
6879 break;
6880
28f4ec01
BS
6881 case BUILT_IN_APPLY_ARGS:
6882 return expand_builtin_apply_args ();
6883
6884 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6885 FUNCTION with a copy of the parameters described by
6886 ARGUMENTS, and ARGSIZE. It returns a block of memory
6887 allocated on the stack into which is stored all the registers
6888 that might possibly be used for returning the result of a
6889 function. ARGUMENTS is the value returned by
6890 __builtin_apply_args. ARGSIZE is the number of bytes of
6891 arguments that must be copied. ??? How should this value be
6892 computed? We'll also need a safe worst case value for varargs
6893 functions. */
6894 case BUILT_IN_APPLY:
5039610b 6895 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 6896 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 6897 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 6898 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
6899 return const0_rtx;
6900 else
6901 {
28f4ec01
BS
6902 rtx ops[3];
6903
5039610b
SL
6904 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6905 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6906 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
6907
6908 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6909 }
6910
6911 /* __builtin_return (RESULT) causes the function to return the
6912 value described by RESULT. RESULT is address of the block of
6913 memory returned by __builtin_apply. */
6914 case BUILT_IN_RETURN:
5039610b
SL
6915 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6916 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
6917 return const0_rtx;
6918
6919 case BUILT_IN_SAVEREGS:
d3707adb 6920 return expand_builtin_saveregs ();
28f4ec01 6921
6ef5231b
JJ
6922 case BUILT_IN_VA_ARG_PACK:
6923 /* All valid uses of __builtin_va_arg_pack () are removed during
6924 inlining. */
c94ed7a1 6925 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
6926 return const0_rtx;
6927
ab0e176c
JJ
6928 case BUILT_IN_VA_ARG_PACK_LEN:
6929 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6930 inlining. */
c94ed7a1 6931 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
6932 return const0_rtx;
6933
28f4ec01
BS
6934 /* Return the address of the first anonymous stack arg. */
6935 case BUILT_IN_NEXT_ARG:
5039610b 6936 if (fold_builtin_next_arg (exp, false))
c22cacf3 6937 return const0_rtx;
8870e212 6938 return expand_builtin_next_arg ();
28f4ec01 6939
677feb77
DD
6940 case BUILT_IN_CLEAR_CACHE:
6941 target = expand_builtin___clear_cache (exp);
6942 if (target)
6943 return target;
6944 break;
6945
28f4ec01 6946 case BUILT_IN_CLASSIFY_TYPE:
5039610b 6947 return expand_builtin_classify_type (exp);
28f4ec01
BS
6948
6949 case BUILT_IN_CONSTANT_P:
6de9cd9a 6950 return const0_rtx;
28f4ec01
BS
6951
6952 case BUILT_IN_FRAME_ADDRESS:
6953 case BUILT_IN_RETURN_ADDRESS:
5039610b 6954 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
6955
6956 /* Returns the address of the area where the structure is returned.
6957 0 otherwise. */
6958 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 6959 if (call_expr_nargs (exp) != 0
ca7fd9cd 6960 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 6961 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 6962 return const0_rtx;
28f4ec01 6963 else
ca7fd9cd 6964 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 6965
9e878cf1 6966 CASE_BUILT_IN_ALLOCA:
b7e52782 6967 target = expand_builtin_alloca (exp);
28f4ec01
BS
6968 if (target)
6969 return target;
6970 break;
6971
e3174bdf
MO
6972 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6973 return expand_asan_emit_allocas_unpoison (exp);
6974
6de9cd9a
DN
6975 case BUILT_IN_STACK_SAVE:
6976 return expand_stack_save ();
6977
6978 case BUILT_IN_STACK_RESTORE:
5039610b 6979 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
6980 return const0_rtx;
6981
ac868f29 6982 case BUILT_IN_BSWAP16:
167fa32c
EC
6983 case BUILT_IN_BSWAP32:
6984 case BUILT_IN_BSWAP64:
ac868f29 6985 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
6986 if (target)
6987 return target;
6988 break;
6989
ea6a6627 6990 CASE_INT_FN (BUILT_IN_FFS):
5039610b 6991 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6992 subtarget, ffs_optab);
2928cd7a
RH
6993 if (target)
6994 return target;
6995 break;
6996
ea6a6627 6997 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 6998 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6999 subtarget, clz_optab);
2928cd7a
RH
7000 if (target)
7001 return target;
7002 break;
7003
ea6a6627 7004 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 7005 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7006 subtarget, ctz_optab);
2928cd7a
RH
7007 if (target)
7008 return target;
7009 break;
7010
3801c801 7011 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
7012 target = expand_builtin_unop (target_mode, exp, target,
7013 subtarget, clrsb_optab);
7014 if (target)
7015 return target;
7016 break;
7017
ea6a6627 7018 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 7019 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7020 subtarget, popcount_optab);
2928cd7a
RH
7021 if (target)
7022 return target;
7023 break;
7024
ea6a6627 7025 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 7026 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7027 subtarget, parity_optab);
28f4ec01
BS
7028 if (target)
7029 return target;
7030 break;
7031
7032 case BUILT_IN_STRLEN:
5039610b 7033 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
7034 if (target)
7035 return target;
7036 break;
7037
ee92e7ba
MS
7038 case BUILT_IN_STRCAT:
7039 target = expand_builtin_strcat (exp, target);
7040 if (target)
7041 return target;
7042 break;
7043
28f4ec01 7044 case BUILT_IN_STRCPY:
44e10129 7045 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
7046 if (target)
7047 return target;
7048 break;
8d51ecf8 7049
ee92e7ba
MS
7050 case BUILT_IN_STRNCAT:
7051 target = expand_builtin_strncat (exp, target);
7052 if (target)
7053 return target;
7054 break;
7055
da9e9f08 7056 case BUILT_IN_STRNCPY:
44e10129 7057 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
7058 if (target)
7059 return target;
7060 break;
8d51ecf8 7061
9cb65f92 7062 case BUILT_IN_STPCPY:
609ae0e2 7063 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
7064 if (target)
7065 return target;
7066 break;
7067
e50d56a5
MS
7068 case BUILT_IN_STPNCPY:
7069 target = expand_builtin_stpncpy (exp, target);
7070 if (target)
7071 return target;
7072 break;
7073
d9c5a8b9
MS
7074 case BUILT_IN_MEMCHR:
7075 target = expand_builtin_memchr (exp, target);
7076 if (target)
7077 return target;
7078 break;
7079
28f4ec01 7080 case BUILT_IN_MEMCPY:
44e10129 7081 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
7082 if (target)
7083 return target;
7084 break;
7085
e50d56a5
MS
7086 case BUILT_IN_MEMMOVE:
7087 target = expand_builtin_memmove (exp, target);
7088 if (target)
7089 return target;
7090 break;
7091
9cb65f92 7092 case BUILT_IN_MEMPCPY:
671a00ee 7093 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
7094 if (target)
7095 return target;
7096 break;
7097
7098 case BUILT_IN_MEMSET:
5039610b 7099 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
7100 if (target)
7101 return target;
7102 break;
7103
e3a709be 7104 case BUILT_IN_BZERO:
8148fe65 7105 target = expand_builtin_bzero (exp);
e3a709be
KG
7106 if (target)
7107 return target;
7108 break;
7109
28f4ec01 7110 case BUILT_IN_STRCMP:
44e10129 7111 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
7112 if (target)
7113 return target;
7114 break;
7115
da9e9f08
KG
7116 case BUILT_IN_STRNCMP:
7117 target = expand_builtin_strncmp (exp, target, mode);
7118 if (target)
7119 return target;
7120 break;
7121
4b2a62db 7122 case BUILT_IN_BCMP:
28f4ec01 7123 case BUILT_IN_MEMCMP:
36b85e43
BS
7124 case BUILT_IN_MEMCMP_EQ:
7125 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
7126 if (target)
7127 return target;
36b85e43
BS
7128 if (fcode == BUILT_IN_MEMCMP_EQ)
7129 {
7130 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7131 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7132 }
28f4ec01 7133 break;
28f4ec01
BS
7134
7135 case BUILT_IN_SETJMP:
903c723b 7136 /* This should have been lowered to the builtins below. */
4f6c2131
EB
7137 gcc_unreachable ();
7138
7139 case BUILT_IN_SETJMP_SETUP:
7140 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7141 and the receiver label. */
5039610b 7142 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 7143 {
5039610b 7144 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 7145 VOIDmode, EXPAND_NORMAL);
5039610b 7146 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 7147 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7148
7149 /* This is copied from the handling of non-local gotos. */
7150 expand_builtin_setjmp_setup (buf_addr, label_r);
7151 nonlocal_goto_handler_labels
b5241a5a 7152 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
7153 nonlocal_goto_handler_labels);
7154 /* ??? Do not let expand_label treat us as such since we would
7155 not want to be both on the list of non-local labels and on
7156 the list of forced labels. */
7157 FORCED_LABEL (label) = 0;
7158 return const0_rtx;
7159 }
7160 break;
7161
4f6c2131
EB
7162 case BUILT_IN_SETJMP_RECEIVER:
7163 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 7164 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 7165 {
5039610b 7166 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 7167 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7168
7169 expand_builtin_setjmp_receiver (label_r);
7170 return const0_rtx;
7171 }
250d07b6 7172 break;
28f4ec01
BS
7173
7174 /* __builtin_longjmp is passed a pointer to an array of five words.
7175 It's similar to the C library longjmp function but works with
7176 __builtin_setjmp above. */
7177 case BUILT_IN_LONGJMP:
5039610b 7178 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 7179 {
5039610b 7180 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 7181 VOIDmode, EXPAND_NORMAL);
5039610b 7182 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
7183
7184 if (value != const1_rtx)
7185 {
9e637a26 7186 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
7187 return const0_rtx;
7188 }
7189
7190 expand_builtin_longjmp (buf_addr, value);
7191 return const0_rtx;
7192 }
4f6c2131 7193 break;
28f4ec01 7194
6de9cd9a 7195 case BUILT_IN_NONLOCAL_GOTO:
5039610b 7196 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
7197 if (target)
7198 return target;
7199 break;
7200
2b92e7f5
RK
7201 /* This updates the setjmp buffer that is its argument with the value
7202 of the current stack pointer. */
7203 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 7204 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
7205 {
7206 rtx buf_addr
5039610b 7207 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
7208
7209 expand_builtin_update_setjmp_buf (buf_addr);
7210 return const0_rtx;
7211 }
7212 break;
7213
28f4ec01 7214 case BUILT_IN_TRAP:
9602f5a0 7215 expand_builtin_trap ();
28f4ec01
BS
7216 return const0_rtx;
7217
468059bc
DD
7218 case BUILT_IN_UNREACHABLE:
7219 expand_builtin_unreachable ();
7220 return const0_rtx;
7221
ea6a6627 7222 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
7223 case BUILT_IN_SIGNBITD32:
7224 case BUILT_IN_SIGNBITD64:
7225 case BUILT_IN_SIGNBITD128:
ef79730c
RS
7226 target = expand_builtin_signbit (exp, target);
7227 if (target)
7228 return target;
7229 break;
7230
28f4ec01
BS
7231 /* Various hooks for the DWARF 2 __throw routine. */
7232 case BUILT_IN_UNWIND_INIT:
7233 expand_builtin_unwind_init ();
7234 return const0_rtx;
7235 case BUILT_IN_DWARF_CFA:
7236 return virtual_cfa_rtx;
7237#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
7238 case BUILT_IN_DWARF_SP_COLUMN:
7239 return expand_builtin_dwarf_sp_column ();
d9d5c9de 7240 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 7241 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 7242 return const0_rtx;
28f4ec01
BS
7243#endif
7244 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 7245 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7246 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 7247 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7248 case BUILT_IN_EH_RETURN:
5039610b
SL
7249 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7250 CALL_EXPR_ARG (exp, 1));
28f4ec01 7251 return const0_rtx;
52a11cbf 7252 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 7253 return expand_builtin_eh_return_data_regno (exp);
c76362b4 7254 case BUILT_IN_EXTEND_POINTER:
5039610b 7255 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
7256 case BUILT_IN_EH_POINTER:
7257 return expand_builtin_eh_pointer (exp);
7258 case BUILT_IN_EH_FILTER:
7259 return expand_builtin_eh_filter (exp);
7260 case BUILT_IN_EH_COPY_VALUES:
7261 return expand_builtin_eh_copy_values (exp);
c76362b4 7262
6c535c69 7263 case BUILT_IN_VA_START:
5039610b 7264 return expand_builtin_va_start (exp);
d3707adb 7265 case BUILT_IN_VA_END:
5039610b 7266 return expand_builtin_va_end (exp);
d3707adb 7267 case BUILT_IN_VA_COPY:
5039610b 7268 return expand_builtin_va_copy (exp);
994a57cd 7269 case BUILT_IN_EXPECT:
5039610b 7270 return expand_builtin_expect (exp, target);
45d439ac
JJ
7271 case BUILT_IN_ASSUME_ALIGNED:
7272 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 7273 case BUILT_IN_PREFETCH:
5039610b 7274 expand_builtin_prefetch (exp);
a9ccbb60
JJ
7275 return const0_rtx;
7276
6de9cd9a 7277 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
7278 return expand_builtin_init_trampoline (exp, true);
7279 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7280 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 7281 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 7282 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 7283
4c640e26
EB
7284 case BUILT_IN_INIT_DESCRIPTOR:
7285 return expand_builtin_init_descriptor (exp);
7286 case BUILT_IN_ADJUST_DESCRIPTOR:
7287 return expand_builtin_adjust_descriptor (exp);
7288
d1c38823
ZD
7289 case BUILT_IN_FORK:
7290 case BUILT_IN_EXECL:
7291 case BUILT_IN_EXECV:
7292 case BUILT_IN_EXECLP:
7293 case BUILT_IN_EXECLE:
7294 case BUILT_IN_EXECVP:
7295 case BUILT_IN_EXECVE:
5039610b 7296 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
7297 if (target)
7298 return target;
7299 break;
28f4ec01 7300
e0a8ecf2
AM
7301 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7302 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7303 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7304 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7305 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7306 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 7307 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
7308 if (target)
7309 return target;
7310 break;
7311
e0a8ecf2
AM
7312 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7313 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7314 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7315 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7316 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7317 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 7318 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
7319 if (target)
7320 return target;
7321 break;
7322
e0a8ecf2
AM
7323 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7324 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7325 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7326 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7327 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7328 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 7329 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
7330 if (target)
7331 return target;
7332 break;
7333
e0a8ecf2
AM
7334 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7335 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7336 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7337 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7338 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7339 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 7340 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
7341 if (target)
7342 return target;
7343 break;
7344
e0a8ecf2
AM
7345 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7346 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7347 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7348 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7349 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7350 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 7351 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
7352 if (target)
7353 return target;
7354 break;
7355
e0a8ecf2
AM
7356 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7357 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7358 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7359 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7360 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7361 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 7362 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
7363 if (target)
7364 return target;
7365 break;
7366
e0a8ecf2
AM
7367 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7368 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7369 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7370 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7371 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 7373 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
7374 if (target)
7375 return target;
7376 break;
7377
e0a8ecf2
AM
7378 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7379 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7380 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7381 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7382 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7383 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 7384 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
7385 if (target)
7386 return target;
7387 break;
7388
e0a8ecf2
AM
7389 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7390 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7391 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7392 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7393 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 7395 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
7396 if (target)
7397 return target;
7398 break;
7399
e0a8ecf2
AM
7400 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7401 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7402 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7403 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7404 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 7406 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
7407 if (target)
7408 return target;
7409 break;
7410
e0a8ecf2
AM
7411 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7412 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7413 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7414 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7415 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7416 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 7417 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
7418 if (target)
7419 return target;
7420 break;
7421
e0a8ecf2
AM
7422 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7423 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7424 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7425 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7426 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7427 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 7428 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
7429 if (target)
7430 return target;
7431 break;
7432
e0a8ecf2
AM
7433 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7434 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7435 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7436 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7437 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
7438 if (mode == VOIDmode)
7439 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
7440 if (!target || !register_operand (target, mode))
7441 target = gen_reg_rtx (mode);
02ee605c 7442
e0a8ecf2
AM
7443 mode = get_builtin_sync_mode
7444 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 7445 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
7446 if (target)
7447 return target;
7448 break;
7449
e0a8ecf2
AM
7450 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7451 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7452 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7453 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7454 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7455 mode = get_builtin_sync_mode
7456 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 7457 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
7458 if (target)
7459 return target;
7460 break;
7461
e0a8ecf2
AM
7462 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7463 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7464 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7465 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7466 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7467 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7468 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
7469 if (target)
7470 return target;
7471 break;
7472
e0a8ecf2
AM
7473 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7474 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7475 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7476 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7477 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7478 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7479 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
7480 return const0_rtx;
7481
e0a8ecf2
AM
7482 case BUILT_IN_SYNC_SYNCHRONIZE:
7483 expand_builtin_sync_synchronize ();
48ae6c13
RH
7484 return const0_rtx;
7485
86951993
AM
7486 case BUILT_IN_ATOMIC_EXCHANGE_1:
7487 case BUILT_IN_ATOMIC_EXCHANGE_2:
7488 case BUILT_IN_ATOMIC_EXCHANGE_4:
7489 case BUILT_IN_ATOMIC_EXCHANGE_8:
7490 case BUILT_IN_ATOMIC_EXCHANGE_16:
7491 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7492 target = expand_builtin_atomic_exchange (mode, exp, target);
7493 if (target)
7494 return target;
7495 break;
7496
7497 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7498 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7499 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7500 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7501 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
7502 {
7503 unsigned int nargs, z;
9771b263 7504 vec<tree, va_gc> *vec;
e351ae85
AM
7505
7506 mode =
7507 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7508 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7509 if (target)
7510 return target;
7511
7512 /* If this is turned into an external library call, the weak parameter
7513 must be dropped to match the expected parameter list. */
7514 nargs = call_expr_nargs (exp);
9771b263 7515 vec_alloc (vec, nargs - 1);
e351ae85 7516 for (z = 0; z < 3; z++)
9771b263 7517 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7518 /* Skip the boolean weak parameter. */
7519 for (z = 4; z < 6; z++)
9771b263 7520 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7521 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7522 break;
7523 }
86951993
AM
7524
7525 case BUILT_IN_ATOMIC_LOAD_1:
7526 case BUILT_IN_ATOMIC_LOAD_2:
7527 case BUILT_IN_ATOMIC_LOAD_4:
7528 case BUILT_IN_ATOMIC_LOAD_8:
7529 case BUILT_IN_ATOMIC_LOAD_16:
7530 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7531 target = expand_builtin_atomic_load (mode, exp, target);
7532 if (target)
7533 return target;
7534 break;
7535
7536 case BUILT_IN_ATOMIC_STORE_1:
7537 case BUILT_IN_ATOMIC_STORE_2:
7538 case BUILT_IN_ATOMIC_STORE_4:
7539 case BUILT_IN_ATOMIC_STORE_8:
7540 case BUILT_IN_ATOMIC_STORE_16:
7541 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7542 target = expand_builtin_atomic_store (mode, exp);
7543 if (target)
7544 return const0_rtx;
7545 break;
7546
7547 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7548 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7549 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7550 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7551 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7552 {
7553 enum built_in_function lib;
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7555 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7556 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7557 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7558 ignore, lib);
7559 if (target)
7560 return target;
7561 break;
7562 }
7563 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7564 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7565 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7566 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7567 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7568 {
7569 enum built_in_function lib;
7570 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7571 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7572 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7573 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7574 ignore, lib);
7575 if (target)
7576 return target;
7577 break;
7578 }
7579 case BUILT_IN_ATOMIC_AND_FETCH_1:
7580 case BUILT_IN_ATOMIC_AND_FETCH_2:
7581 case BUILT_IN_ATOMIC_AND_FETCH_4:
7582 case BUILT_IN_ATOMIC_AND_FETCH_8:
7583 case BUILT_IN_ATOMIC_AND_FETCH_16:
7584 {
7585 enum built_in_function lib;
7586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7587 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7588 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7589 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7590 ignore, lib);
7591 if (target)
7592 return target;
7593 break;
7594 }
7595 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7596 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7597 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7598 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7599 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7600 {
7601 enum built_in_function lib;
7602 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7603 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7604 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7605 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7606 ignore, lib);
7607 if (target)
7608 return target;
7609 break;
7610 }
7611 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7612 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7613 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7614 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7615 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7616 {
7617 enum built_in_function lib;
7618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7619 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7620 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7621 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7622 ignore, lib);
7623 if (target)
7624 return target;
7625 break;
7626 }
7627 case BUILT_IN_ATOMIC_OR_FETCH_1:
7628 case BUILT_IN_ATOMIC_OR_FETCH_2:
7629 case BUILT_IN_ATOMIC_OR_FETCH_4:
7630 case BUILT_IN_ATOMIC_OR_FETCH_8:
7631 case BUILT_IN_ATOMIC_OR_FETCH_16:
7632 {
7633 enum built_in_function lib;
7634 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7635 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7636 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7637 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7638 ignore, lib);
7639 if (target)
7640 return target;
7641 break;
7642 }
7643 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7644 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7645 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7646 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7647 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7648 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7649 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7650 ignore, BUILT_IN_NONE);
7651 if (target)
7652 return target;
7653 break;
7654
7655 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7656 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7657 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7658 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7659 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7660 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7661 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7662 ignore, BUILT_IN_NONE);
7663 if (target)
7664 return target;
7665 break;
7666
7667 case BUILT_IN_ATOMIC_FETCH_AND_1:
7668 case BUILT_IN_ATOMIC_FETCH_AND_2:
7669 case BUILT_IN_ATOMIC_FETCH_AND_4:
7670 case BUILT_IN_ATOMIC_FETCH_AND_8:
7671 case BUILT_IN_ATOMIC_FETCH_AND_16:
7672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7673 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7674 ignore, BUILT_IN_NONE);
7675 if (target)
7676 return target;
7677 break;
7678
7679 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7680 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7681 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7682 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7683 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7684 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7685 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7686 ignore, BUILT_IN_NONE);
7687 if (target)
7688 return target;
7689 break;
7690
7691 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7692 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7693 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7694 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7695 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7697 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7698 ignore, BUILT_IN_NONE);
7699 if (target)
7700 return target;
7701 break;
7702
7703 case BUILT_IN_ATOMIC_FETCH_OR_1:
7704 case BUILT_IN_ATOMIC_FETCH_OR_2:
7705 case BUILT_IN_ATOMIC_FETCH_OR_4:
7706 case BUILT_IN_ATOMIC_FETCH_OR_8:
7707 case BUILT_IN_ATOMIC_FETCH_OR_16:
7708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7709 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7710 ignore, BUILT_IN_NONE);
7711 if (target)
7712 return target;
7713 break;
d660c35e
AM
7714
7715 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 7716 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
7717
7718 case BUILT_IN_ATOMIC_CLEAR:
7719 return expand_builtin_atomic_clear (exp);
86951993
AM
7720
7721 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7722 return expand_builtin_atomic_always_lock_free (exp);
7723
7724 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7725 target = expand_builtin_atomic_is_lock_free (exp);
7726 if (target)
7727 return target;
7728 break;
7729
7730 case BUILT_IN_ATOMIC_THREAD_FENCE:
7731 expand_builtin_atomic_thread_fence (exp);
7732 return const0_rtx;
7733
7734 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7735 expand_builtin_atomic_signal_fence (exp);
7736 return const0_rtx;
7737
10a0d495
JJ
7738 case BUILT_IN_OBJECT_SIZE:
7739 return expand_builtin_object_size (exp);
7740
7741 case BUILT_IN_MEMCPY_CHK:
7742 case BUILT_IN_MEMPCPY_CHK:
7743 case BUILT_IN_MEMMOVE_CHK:
7744 case BUILT_IN_MEMSET_CHK:
7745 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7746 if (target)
7747 return target;
7748 break;
7749
7750 case BUILT_IN_STRCPY_CHK:
7751 case BUILT_IN_STPCPY_CHK:
7752 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 7753 case BUILT_IN_STPNCPY_CHK:
10a0d495 7754 case BUILT_IN_STRCAT_CHK:
1c2fc017 7755 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
7756 case BUILT_IN_SNPRINTF_CHK:
7757 case BUILT_IN_VSNPRINTF_CHK:
7758 maybe_emit_chk_warning (exp, fcode);
7759 break;
7760
7761 case BUILT_IN_SPRINTF_CHK:
7762 case BUILT_IN_VSPRINTF_CHK:
7763 maybe_emit_sprintf_chk_warning (exp, fcode);
7764 break;
7765
f9555f40 7766 case BUILT_IN_FREE:
a3a704a4
MH
7767 if (warn_free_nonheap_object)
7768 maybe_emit_free_warning (exp);
f9555f40
JJ
7769 break;
7770
f959607b
CLT
7771 case BUILT_IN_THREAD_POINTER:
7772 return expand_builtin_thread_pointer (exp, target);
7773
7774 case BUILT_IN_SET_THREAD_POINTER:
7775 expand_builtin_set_thread_pointer (exp);
7776 return const0_rtx;
7777
d5e254e1
IE
7778 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7779 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7780 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7781 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7782 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7783 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7784 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7785 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7786 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7787 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7788 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7789 /* We allow user CHKP builtins if Pointer Bounds
7790 Checker is off. */
7791 if (!chkp_function_instrumented_p (current_function_decl))
7792 {
7793 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7794 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7795 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7796 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7797 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7798 return expand_normal (CALL_EXPR_ARG (exp, 0));
7799 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7800 return expand_normal (size_zero_node);
7801 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7802 return expand_normal (size_int (-1));
7803 else
7804 return const0_rtx;
7805 }
7806 /* FALLTHROUGH */
7807
7808 case BUILT_IN_CHKP_BNDMK:
7809 case BUILT_IN_CHKP_BNDSTX:
7810 case BUILT_IN_CHKP_BNDCL:
7811 case BUILT_IN_CHKP_BNDCU:
7812 case BUILT_IN_CHKP_BNDLDX:
7813 case BUILT_IN_CHKP_BNDRET:
7814 case BUILT_IN_CHKP_INTERSECT:
7815 case BUILT_IN_CHKP_NARROW:
7816 case BUILT_IN_CHKP_EXTRACT_LOWER:
7817 case BUILT_IN_CHKP_EXTRACT_UPPER:
7818 /* Software implementation of Pointer Bounds Checker is NYI.
7819 Target support is required. */
7820 error ("Your target platform does not support -fcheck-pointer-bounds");
7821 break;
7822
41dbbb37 7823 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
7824 /* Do library call, if we failed to expand the builtin when
7825 folding. */
41dbbb37
TS
7826 break;
7827
1f62d637
TV
7828 case BUILT_IN_GOACC_PARLEVEL_ID:
7829 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7830 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7831
e62f4abc 7832 default: /* just do library call, if unknown builtin */
84b8b0e0 7833 break;
28f4ec01
BS
7834 }
7835
7836 /* The switch statement above can drop through to cause the function
7837 to be called normally. */
7838 return expand_call (exp, target, ignore);
7839}
b0b3afb2 7840
edcf72f3
IE
7841/* Similar to expand_builtin but is used for instrumented calls. */
7842
7843rtx
7844expand_builtin_with_bounds (tree exp, rtx target,
7845 rtx subtarget ATTRIBUTE_UNUSED,
7846 machine_mode mode, int ignore)
7847{
7848 tree fndecl = get_callee_fndecl (exp);
7849 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7850
7851 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7852
7853 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7854 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7855
7856 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7857 && fcode < END_CHKP_BUILTINS);
7858
7859 switch (fcode)
7860 {
7861 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7862 target = expand_builtin_memcpy_with_bounds (exp, target);
7863 if (target)
7864 return target;
7865 break;
7866
7867 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
671a00ee 7868 target = expand_builtin_mempcpy_with_bounds (exp, target);
edcf72f3
IE
7869 if (target)
7870 return target;
7871 break;
7872
7873 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7874 target = expand_builtin_memset_with_bounds (exp, target, mode);
7875 if (target)
7876 return target;
7877 break;
7878
cc8bea0a
MS
7879 case BUILT_IN_MEMCPY_CHKP:
7880 case BUILT_IN_MEMMOVE_CHKP:
7881 case BUILT_IN_MEMPCPY_CHKP:
7882 if (call_expr_nargs (exp) > 3)
7883 {
7884 /* memcpy_chkp (void *dst, size_t dstbnd,
7885 const void *src, size_t srcbnd, size_t n)
7886 and others take a pointer bound argument just after each
7887 pointer argument. */
7888 tree dest = CALL_EXPR_ARG (exp, 0);
7889 tree src = CALL_EXPR_ARG (exp, 2);
7890 tree len = CALL_EXPR_ARG (exp, 4);
7891
7892 check_memop_access (exp, dest, src, len);
7893 break;
7894 }
7895
edcf72f3
IE
7896 default:
7897 break;
7898 }
7899
7900 /* The switch statement above can drop through to cause the function
7901 to be called normally. */
7902 return expand_call (exp, target, ignore);
7903 }
7904
4977bab6 7905/* Determine whether a tree node represents a call to a built-in
feda1845
RS
7906 function. If the tree T is a call to a built-in function with
7907 the right number of arguments of the appropriate types, return
7908 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7909 Otherwise the return value is END_BUILTINS. */
4682ae04 7910
4977bab6 7911enum built_in_function
fa233e34 7912builtin_mathfn_code (const_tree t)
4977bab6 7913{
fa233e34
KG
7914 const_tree fndecl, arg, parmlist;
7915 const_tree argtype, parmtype;
7916 const_call_expr_arg_iterator iter;
4977bab6 7917
5f92d109 7918 if (TREE_CODE (t) != CALL_EXPR)
4977bab6
ZW
7919 return END_BUILTINS;
7920
2f503025
JM
7921 fndecl = get_callee_fndecl (t);
7922 if (fndecl == NULL_TREE
feda1845 7923 || TREE_CODE (fndecl) != FUNCTION_DECL
4977bab6
ZW
7924 || ! DECL_BUILT_IN (fndecl)
7925 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7926 return END_BUILTINS;
7927
feda1845 7928 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
fa233e34 7929 init_const_call_expr_arg_iterator (t, &iter);
feda1845 7930 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
c0a47a61 7931 {
feda1845
RS
7932 /* If a function doesn't take a variable number of arguments,
7933 the last element in the list will have type `void'. */
7934 parmtype = TREE_VALUE (parmlist);
7935 if (VOID_TYPE_P (parmtype))
7936 {
fa233e34 7937 if (more_const_call_expr_args_p (&iter))
feda1845
RS
7938 return END_BUILTINS;
7939 return DECL_FUNCTION_CODE (fndecl);
7940 }
7941
fa233e34 7942 if (! more_const_call_expr_args_p (&iter))
c0a47a61 7943 return END_BUILTINS;
b8698a0f 7944
fa233e34 7945 arg = next_const_call_expr_arg (&iter);
5039610b 7946 argtype = TREE_TYPE (arg);
feda1845
RS
7947
7948 if (SCALAR_FLOAT_TYPE_P (parmtype))
7949 {
7950 if (! SCALAR_FLOAT_TYPE_P (argtype))
7951 return END_BUILTINS;
7952 }
7953 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7954 {
7955 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7956 return END_BUILTINS;
7957 }
7958 else if (POINTER_TYPE_P (parmtype))
7959 {
7960 if (! POINTER_TYPE_P (argtype))
7961 return END_BUILTINS;
7962 }
7963 else if (INTEGRAL_TYPE_P (parmtype))
7964 {
7965 if (! INTEGRAL_TYPE_P (argtype))
7966 return END_BUILTINS;
7967 }
7968 else
c0a47a61 7969 return END_BUILTINS;
c0a47a61
RS
7970 }
7971
feda1845 7972 /* Variable-length argument list. */
4977bab6
ZW
7973 return DECL_FUNCTION_CODE (fndecl);
7974}
7975
5039610b
SL
7976/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7977 evaluate to a constant. */
b0b3afb2
BS
7978
7979static tree
5039610b 7980fold_builtin_constant_p (tree arg)
b0b3afb2 7981{
b0b3afb2
BS
7982 /* We return 1 for a numeric type that's known to be a constant
7983 value at compile-time or for an aggregate type that's a
7984 literal constant. */
5039610b 7985 STRIP_NOPS (arg);
b0b3afb2
BS
7986
7987 /* If we know this is a constant, emit the constant of one. */
5039610b
SL
7988 if (CONSTANT_CLASS_P (arg)
7989 || (TREE_CODE (arg) == CONSTRUCTOR
7990 && TREE_CONSTANT (arg)))
b0b3afb2 7991 return integer_one_node;
5039610b 7992 if (TREE_CODE (arg) == ADDR_EXPR)
fb664a2c 7993 {
5039610b 7994 tree op = TREE_OPERAND (arg, 0);
fb664a2c
RG
7995 if (TREE_CODE (op) == STRING_CST
7996 || (TREE_CODE (op) == ARRAY_REF
7997 && integer_zerop (TREE_OPERAND (op, 1))
7998 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7999 return integer_one_node;
8000 }
b0b3afb2 8001
0dcd3840
RH
8002 /* If this expression has side effects, show we don't know it to be a
8003 constant. Likewise if it's a pointer or aggregate type since in
8004 those case we only want literals, since those are only optimized
13104975
ZW
8005 when generating RTL, not later.
8006 And finally, if we are compiling an initializer, not code, we
8007 need to return a definite result now; there's not going to be any
8008 more optimization done. */
5039610b
SL
8009 if (TREE_SIDE_EFFECTS (arg)
8010 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8011 || POINTER_TYPE_P (TREE_TYPE (arg))
63b48197 8012 || cfun == 0
4e7d7b3d
JJ
8013 || folding_initializer
8014 || force_folding_builtin_constant_p)
b0b3afb2
BS
8015 return integer_zero_node;
8016
5039610b 8017 return NULL_TREE;
b0b3afb2
BS
8018}
8019
419ce103
AN
8020/* Create builtin_expect with PRED and EXPECTED as its arguments and
8021 return it as a truthvalue. */
6de9cd9a
DN
8022
8023static tree
ed9c79e1
JJ
8024build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8025 tree predictor)
6de9cd9a 8026{
419ce103 8027 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6de9cd9a 8028
e79983f4 8029 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
419ce103
AN
8030 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8031 ret_type = TREE_TYPE (TREE_TYPE (fn));
8032 pred_type = TREE_VALUE (arg_types);
8033 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8034
db3927fb
AH
8035 pred = fold_convert_loc (loc, pred_type, pred);
8036 expected = fold_convert_loc (loc, expected_type, expected);
ed9c79e1
JJ
8037 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8038 predictor);
419ce103
AN
8039
8040 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8041 build_int_cst (ret_type, 0));
8042}
8043
8044/* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
8045 NULL_TREE if no simplification is possible. */
8046
ed9c79e1
JJ
8047tree
8048fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
419ce103 8049{
be31603a 8050 tree inner, fndecl, inner_arg0;
419ce103
AN
8051 enum tree_code code;
8052
be31603a
KT
8053 /* Distribute the expected value over short-circuiting operators.
8054 See through the cast from truthvalue_type_node to long. */
8055 inner_arg0 = arg0;
625a9766 8056 while (CONVERT_EXPR_P (inner_arg0)
be31603a
KT
8057 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8058 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8059 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8060
419ce103
AN
8061 /* If this is a builtin_expect within a builtin_expect keep the
8062 inner one. See through a comparison against a constant. It
8063 might have been added to create a thruthvalue. */
be31603a
KT
8064 inner = inner_arg0;
8065
419ce103
AN
8066 if (COMPARISON_CLASS_P (inner)
8067 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8068 inner = TREE_OPERAND (inner, 0);
8069
8070 if (TREE_CODE (inner) == CALL_EXPR
8071 && (fndecl = get_callee_fndecl (inner))
8072 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8073 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8074 return arg0;
8075
be31603a 8076 inner = inner_arg0;
419ce103
AN
8077 code = TREE_CODE (inner);
8078 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8079 {
8080 tree op0 = TREE_OPERAND (inner, 0);
8081 tree op1 = TREE_OPERAND (inner, 1);
0d2f7959 8082 arg1 = save_expr (arg1);
419ce103 8083
ed9c79e1
JJ
8084 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8085 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
419ce103
AN
8086 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8087
db3927fb 8088 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
419ce103
AN
8089 }
8090
8091 /* If the argument isn't invariant then there's nothing else we can do. */
be31603a 8092 if (!TREE_CONSTANT (inner_arg0))
5039610b 8093 return NULL_TREE;
6de9cd9a 8094
419ce103
AN
8095 /* If we expect that a comparison against the argument will fold to
8096 a constant return the constant. In practice, this means a true
8097 constant or the address of a non-weak symbol. */
be31603a 8098 inner = inner_arg0;
6de9cd9a
DN
8099 STRIP_NOPS (inner);
8100 if (TREE_CODE (inner) == ADDR_EXPR)
8101 {
8102 do
8103 {
8104 inner = TREE_OPERAND (inner, 0);
8105 }
8106 while (TREE_CODE (inner) == COMPONENT_REF
8107 || TREE_CODE (inner) == ARRAY_REF);
8813a647 8108 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
5039610b 8109 return NULL_TREE;
6de9cd9a
DN
8110 }
8111
419ce103
AN
8112 /* Otherwise, ARG0 already has the proper type for the return value. */
8113 return arg0;
6de9cd9a
DN
8114}
8115
5039610b 8116/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 8117
ad82abb8 8118static tree
5039610b 8119fold_builtin_classify_type (tree arg)
ad82abb8 8120{
5039610b 8121 if (arg == 0)
45a2c477 8122 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 8123
45a2c477 8124 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
8125}
8126
5039610b 8127/* Fold a call to __builtin_strlen with argument ARG. */
667bbbbb
EC
8128
8129static tree
ab996409 8130fold_builtin_strlen (location_t loc, tree type, tree arg)
667bbbbb 8131{
5039610b 8132 if (!validate_arg (arg, POINTER_TYPE))
667bbbbb
EC
8133 return NULL_TREE;
8134 else
8135 {
5039610b 8136 tree len = c_strlen (arg, 0);
667bbbbb
EC
8137
8138 if (len)
ab996409 8139 return fold_convert_loc (loc, type, len);
667bbbbb
EC
8140
8141 return NULL_TREE;
8142 }
8143}
8144
ab5e2615
RH
8145/* Fold a call to __builtin_inf or __builtin_huge_val. */
8146
8147static tree
db3927fb 8148fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 8149{
efdc7e19
RH
8150 REAL_VALUE_TYPE real;
8151
6d84156b
JM
8152 /* __builtin_inff is intended to be usable to define INFINITY on all
8153 targets. If an infinity is not available, INFINITY expands "to a
8154 positive constant of type float that overflows at translation
8155 time", footnote "In this case, using INFINITY will violate the
8156 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8157 Thus we pedwarn to ensure this constraint violation is
8158 diagnosed. */
ab5e2615 8159 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 8160 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 8161
efdc7e19
RH
8162 real_inf (&real);
8163 return build_real (type, real);
ab5e2615
RH
8164}
8165
75c7c595
RG
8166/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8167 NULL_TREE if no simplification can be made. */
8168
8169static tree
db3927fb
AH
8170fold_builtin_sincos (location_t loc,
8171 tree arg0, tree arg1, tree arg2)
75c7c595 8172{
5039610b 8173 tree type;
5c1a2e63 8174 tree fndecl, call = NULL_TREE;
75c7c595 8175
5039610b
SL
8176 if (!validate_arg (arg0, REAL_TYPE)
8177 || !validate_arg (arg1, POINTER_TYPE)
8178 || !validate_arg (arg2, POINTER_TYPE))
75c7c595
RG
8179 return NULL_TREE;
8180
75c7c595 8181 type = TREE_TYPE (arg0);
75c7c595
RG
8182
8183 /* Calculate the result when the argument is a constant. */
b03ff92e 8184 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
5c1a2e63 8185 if (fn == END_BUILTINS)
75c7c595
RG
8186 return NULL_TREE;
8187
5c1a2e63
RS
8188 /* Canonicalize sincos to cexpi. */
8189 if (TREE_CODE (arg0) == REAL_CST)
8190 {
8191 tree complex_type = build_complex_type (type);
d7ebef06 8192 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
5c1a2e63
RS
8193 }
8194 if (!call)
8195 {
8196 if (!targetm.libc_has_function (function_c99_math_complex)
8197 || !builtin_decl_implicit_p (fn))
8198 return NULL_TREE;
8199 fndecl = builtin_decl_explicit (fn);
8200 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8201 call = builtin_save_expr (call);
8202 }
75c7c595 8203
928c19bb 8204 return build2 (COMPOUND_EXPR, void_type_node,
75c7c595 8205 build2 (MODIFY_EXPR, void_type_node,
db3927fb 8206 build_fold_indirect_ref_loc (loc, arg1),
5c1a2e63 8207 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
75c7c595 8208 build2 (MODIFY_EXPR, void_type_node,
db3927fb 8209 build_fold_indirect_ref_loc (loc, arg2),
5c1a2e63 8210 fold_build1_loc (loc, REALPART_EXPR, type, call)));
75c7c595
RG
8211}
8212
5039610b
SL
8213/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8214 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
8215
8216static tree
db3927fb 8217fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 8218{
5039610b
SL
8219 if (!validate_arg (arg1, POINTER_TYPE)
8220 || !validate_arg (arg2, POINTER_TYPE)
8221 || !validate_arg (len, INTEGER_TYPE))
8222 return NULL_TREE;
5bb650ec
RS
8223
8224 /* If the LEN parameter is zero, return zero. */
8225 if (integer_zerop (len))
db3927fb 8226 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 8227 arg1, arg2);
5bb650ec
RS
8228
8229 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8230 if (operand_equal_p (arg1, arg2, 0))
db3927fb 8231 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
01847e9d 8232
01847e9d
RS
8233 /* If len parameter is one, return an expression corresponding to
8234 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
ae7e9ddd 8235 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
8236 {
8237 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8238 tree cst_uchar_ptr_node
8239 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8240
db3927fb
AH
8241 tree ind1
8242 = fold_convert_loc (loc, integer_type_node,
8243 build1 (INDIRECT_REF, cst_uchar_node,
8244 fold_convert_loc (loc,
8245 cst_uchar_ptr_node,
01847e9d 8246 arg1)));
db3927fb
AH
8247 tree ind2
8248 = fold_convert_loc (loc, integer_type_node,
8249 build1 (INDIRECT_REF, cst_uchar_node,
8250 fold_convert_loc (loc,
8251 cst_uchar_ptr_node,
01847e9d 8252 arg2)));
db3927fb 8253 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
01847e9d 8254 }
5bb650ec 8255
5039610b 8256 return NULL_TREE;
5bb650ec
RS
8257}
8258
5039610b 8259/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
8260
8261static tree
db3927fb 8262fold_builtin_isascii (location_t loc, tree arg)
df0785d6 8263{
5039610b
SL
8264 if (!validate_arg (arg, INTEGER_TYPE))
8265 return NULL_TREE;
df0785d6
KG
8266 else
8267 {
8268 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 8269 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8270 build_int_cst (integer_type_node,
6728ee79 8271 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 8272 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 8273 arg, integer_zero_node);
df0785d6
KG
8274 }
8275}
8276
5039610b 8277/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
8278
8279static tree
db3927fb 8280fold_builtin_toascii (location_t loc, tree arg)
df0785d6 8281{
5039610b
SL
8282 if (!validate_arg (arg, INTEGER_TYPE))
8283 return NULL_TREE;
b8698a0f 8284
5039610b 8285 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 8286 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8287 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
8288}
8289
5039610b 8290/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
8291
8292static tree
db3927fb 8293fold_builtin_isdigit (location_t loc, tree arg)
61218d19 8294{
5039610b
SL
8295 if (!validate_arg (arg, INTEGER_TYPE))
8296 return NULL_TREE;
61218d19
KG
8297 else
8298 {
8299 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
8300 /* According to the C standard, isdigit is unaffected by locale.
8301 However, it definitely is affected by the target character set. */
c5ff069d
ZW
8302 unsigned HOST_WIDE_INT target_digit0
8303 = lang_hooks.to_target_charset ('0');
8304
8305 if (target_digit0 == 0)
8306 return NULL_TREE;
8307
db3927fb 8308 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
8309 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8310 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 8311 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 8312 build_int_cst (unsigned_type_node, 9));
61218d19
KG
8313 }
8314}
ef79730c 8315
5039610b 8316/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
8317
8318static tree
db3927fb 8319fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 8320{
5039610b
SL
8321 if (!validate_arg (arg, REAL_TYPE))
8322 return NULL_TREE;
9655d83b 8323
db3927fb 8324 arg = fold_convert_loc (loc, type, arg);
db3927fb 8325 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8326}
8327
5039610b 8328/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
8329
8330static tree
db3927fb 8331fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 8332{
5039610b
SL
8333 if (!validate_arg (arg, INTEGER_TYPE))
8334 return NULL_TREE;
9655d83b 8335
db3927fb 8336 arg = fold_convert_loc (loc, type, arg);
db3927fb 8337 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8338}
8339
16949072
RG
8340/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8341
8342static tree
8343fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8344{
4e48e02b 8345 /* ??? Only expand to FMA_EXPR if it's directly supported. */
16949072 8346 if (validate_arg (arg0, REAL_TYPE)
c3284718 8347 && validate_arg (arg1, REAL_TYPE)
4e48e02b
RS
8348 && validate_arg (arg2, REAL_TYPE)
8349 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8350 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
16949072 8351
16949072
RG
8352 return NULL_TREE;
8353}
8354
527cab20
KG
8355/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8356
8357static tree
db3927fb 8358fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 8359{
c128599a
KG
8360 if (validate_arg (arg, COMPLEX_TYPE)
8361 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
8362 {
8363 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 8364
527cab20
KG
8365 if (atan2_fn)
8366 {
5039610b 8367 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
8368 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8369 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8370 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
8371 }
8372 }
b8698a0f 8373
527cab20
KG
8374 return NULL_TREE;
8375}
8376
7a2a25ab
KG
8377/* Fold a call to builtin frexp, we can assume the base is 2. */
8378
8379static tree
db3927fb 8380fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7a2a25ab
KG
8381{
8382 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8383 return NULL_TREE;
b8698a0f 8384
7a2a25ab 8385 STRIP_NOPS (arg0);
b8698a0f 8386
7a2a25ab
KG
8387 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8388 return NULL_TREE;
b8698a0f 8389
db3927fb 8390 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7a2a25ab
KG
8391
8392 /* Proceed if a valid pointer type was passed in. */
8393 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8394 {
8395 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8396 tree frac, exp;
b8698a0f 8397
7a2a25ab
KG
8398 switch (value->cl)
8399 {
8400 case rvc_zero:
8401 /* For +-0, return (*exp = 0, +-0). */
8402 exp = integer_zero_node;
8403 frac = arg0;
8404 break;
8405 case rvc_nan:
8406 case rvc_inf:
8407 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
db3927fb 8408 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7a2a25ab
KG
8409 case rvc_normal:
8410 {
8411 /* Since the frexp function always expects base 2, and in
8412 GCC normalized significands are already in the range
8413 [0.5, 1.0), we have exactly what frexp wants. */
8414 REAL_VALUE_TYPE frac_rvt = *value;
8415 SET_REAL_EXP (&frac_rvt, 0);
8416 frac = build_real (rettype, frac_rvt);
45a2c477 8417 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7a2a25ab
KG
8418 }
8419 break;
8420 default:
8421 gcc_unreachable ();
8422 }
b8698a0f 8423
7a2a25ab 8424 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8425 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7a2a25ab 8426 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8427 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7a2a25ab
KG
8428 }
8429
8430 return NULL_TREE;
8431}
8432
3d577eaf
KG
8433/* Fold a call to builtin modf. */
8434
8435static tree
db3927fb 8436fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
8437{
8438 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8439 return NULL_TREE;
b8698a0f 8440
3d577eaf 8441 STRIP_NOPS (arg0);
b8698a0f 8442
3d577eaf
KG
8443 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8444 return NULL_TREE;
b8698a0f 8445
db3927fb 8446 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
8447
8448 /* Proceed if a valid pointer type was passed in. */
8449 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8450 {
8451 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8452 REAL_VALUE_TYPE trunc, frac;
8453
8454 switch (value->cl)
8455 {
8456 case rvc_nan:
8457 case rvc_zero:
8458 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8459 trunc = frac = *value;
8460 break;
8461 case rvc_inf:
8462 /* For +-Inf, return (*arg1 = arg0, +-0). */
8463 frac = dconst0;
8464 frac.sign = value->sign;
8465 trunc = *value;
8466 break;
8467 case rvc_normal:
8468 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8469 real_trunc (&trunc, VOIDmode, value);
8470 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8471 /* If the original number was negative and already
8472 integral, then the fractional part is -0.0. */
8473 if (value->sign && frac.cl == rvc_zero)
8474 frac.sign = value->sign;
8475 break;
8476 }
b8698a0f 8477
3d577eaf 8478 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8479 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
8480 build_real (rettype, trunc));
8481 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8482 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
8483 build_real (rettype, frac));
8484 }
b8698a0f 8485
3d577eaf
KG
8486 return NULL_TREE;
8487}
8488
903c723b
TC
8489/* Given a location LOC, an interclass builtin function decl FNDECL
8490 and its single argument ARG, return an folded expression computing
8491 the same, or NULL_TREE if we either couldn't or didn't want to fold
8492 (the latter happen if there's an RTL instruction available). */
8493
8494static tree
8495fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8496{
8497 machine_mode mode;
8498
8499 if (!validate_arg (arg, REAL_TYPE))
8500 return NULL_TREE;
8501
8502 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8503 return NULL_TREE;
8504
8505 mode = TYPE_MODE (TREE_TYPE (arg));
8506
8507 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
02cf2861 8508
903c723b
TC
8509 /* If there is no optab, try generic code. */
8510 switch (DECL_FUNCTION_CODE (fndecl))
8511 {
8512 tree result;
44e10129 8513
903c723b
TC
8514 CASE_FLT_FN (BUILT_IN_ISINF):
8515 {
8516 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8517 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8518 tree type = TREE_TYPE (arg);
8519 REAL_VALUE_TYPE r;
8520 char buf[128];
8521
8522 if (is_ibm_extended)
8523 {
8524 /* NaN and Inf are encoded in the high-order double value
8525 only. The low-order value is not significant. */
8526 type = double_type_node;
8527 mode = DFmode;
8528 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8529 }
8530 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8531 real_from_string (&r, buf);
8532 result = build_call_expr (isgr_fn, 2,
8533 fold_build1_loc (loc, ABS_EXPR, type, arg),
8534 build_real (type, r));
8535 return result;
8536 }
8537 CASE_FLT_FN (BUILT_IN_FINITE):
8538 case BUILT_IN_ISFINITE:
8539 {
8540 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8541 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8542 tree type = TREE_TYPE (arg);
8543 REAL_VALUE_TYPE r;
8544 char buf[128];
8545
8546 if (is_ibm_extended)
8547 {
8548 /* NaN and Inf are encoded in the high-order double value
8549 only. The low-order value is not significant. */
8550 type = double_type_node;
8551 mode = DFmode;
8552 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8553 }
8554 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8555 real_from_string (&r, buf);
8556 result = build_call_expr (isle_fn, 2,
8557 fold_build1_loc (loc, ABS_EXPR, type, arg),
8558 build_real (type, r));
8559 /*result = fold_build2_loc (loc, UNGT_EXPR,
8560 TREE_TYPE (TREE_TYPE (fndecl)),
8561 fold_build1_loc (loc, ABS_EXPR, type, arg),
8562 build_real (type, r));
8563 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8564 TREE_TYPE (TREE_TYPE (fndecl)),
8565 result);*/
8566 return result;
8567 }
8568 case BUILT_IN_ISNORMAL:
8569 {
8570 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8571 islessequal(fabs(x),DBL_MAX). */
8572 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8573 tree type = TREE_TYPE (arg);
8574 tree orig_arg, max_exp, min_exp;
8575 machine_mode orig_mode = mode;
8576 REAL_VALUE_TYPE rmax, rmin;
8577 char buf[128];
8578
8579 orig_arg = arg = builtin_save_expr (arg);
8580 if (is_ibm_extended)
8581 {
8582 /* Use double to test the normal range of IBM extended
8583 precision. Emin for IBM extended precision is
8584 different to emin for IEEE double, being 53 higher
8585 since the low double exponent is at least 53 lower
8586 than the high double exponent. */
8587 type = double_type_node;
8588 mode = DFmode;
8589 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8590 }
8591 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8592
8593 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8594 real_from_string (&rmax, buf);
8595 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8596 real_from_string (&rmin, buf);
8597 max_exp = build_real (type, rmax);
8598 min_exp = build_real (type, rmin);
8599
8600 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8601 if (is_ibm_extended)
8602 {
8603 /* Testing the high end of the range is done just using
8604 the high double, using the same test as isfinite().
8605 For the subnormal end of the range we first test the
8606 high double, then if its magnitude is equal to the
8607 limit of 0x1p-969, we test whether the low double is
8608 non-zero and opposite sign to the high double. */
8609 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8610 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8611 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8612 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8613 arg, min_exp);
8614 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8615 complex_double_type_node, orig_arg);
8616 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8617 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8618 tree zero = build_real (type, dconst0);
8619 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8620 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8621 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8622 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8623 fold_build3 (COND_EXPR,
8624 integer_type_node,
8625 hilt, logt, lolt));
8626 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8627 eq_min, ok_lo);
8628 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8629 gt_min, eq_min);
8630 }
8631 else
8632 {
8633 tree const isge_fn
8634 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8635 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8636 }
8637 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8638 max_exp, min_exp);
8639 return result;
8640 }
8641 default:
8642 break;
8643 }
8644
8645 return NULL_TREE;
8646}
8647
8648/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
5039610b 8649 ARG is the argument for the call. */
64a9295a
PB
8650
8651static tree
903c723b 8652fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
64a9295a 8653{
903c723b
TC
8654 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8655
5039610b 8656 if (!validate_arg (arg, REAL_TYPE))
83322951 8657 return NULL_TREE;
64a9295a 8658
64a9295a
PB
8659 switch (builtin_index)
8660 {
903c723b
TC
8661 case BUILT_IN_ISINF:
8662 if (!HONOR_INFINITIES (arg))
8663 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8664
8665 return NULL_TREE;
8666
05f41289
KG
8667 case BUILT_IN_ISINF_SIGN:
8668 {
8669 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8670 /* In a boolean context, GCC will fold the inner COND_EXPR to
8671 1. So e.g. "if (isinf_sign(x))" would be folded to just
8672 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
72f52f30 8673 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
e79983f4 8674 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
05f41289
KG
8675 tree tmp = NULL_TREE;
8676
8677 arg = builtin_save_expr (arg);
8678
8679 if (signbit_fn && isinf_fn)
8680 {
db3927fb
AH
8681 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8682 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
05f41289 8683
db3927fb 8684 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8685 signbit_call, integer_zero_node);
db3927fb 8686 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8687 isinf_call, integer_zero_node);
b8698a0f 8688
db3927fb 8689 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
05f41289 8690 integer_minus_one_node, integer_one_node);
db3927fb
AH
8691 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8692 isinf_call, tmp,
05f41289
KG
8693 integer_zero_node);
8694 }
8695
8696 return tmp;
8697 }
8698
903c723b
TC
8699 case BUILT_IN_ISFINITE:
8700 if (!HONOR_NANS (arg)
8701 && !HONOR_INFINITIES (arg))
8702 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8703
8704 return NULL_TREE;
8705
8706 case BUILT_IN_ISNAN:
8707 if (!HONOR_NANS (arg))
8708 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8709
8710 {
8711 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8712 if (is_ibm_extended)
8713 {
8714 /* NaN and Inf are encoded in the high-order double value
8715 only. The low-order value is not significant. */
8716 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8717 }
8718 }
8719 arg = builtin_save_expr (arg);
8720 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8721
64a9295a 8722 default:
298e6adc 8723 gcc_unreachable ();
64a9295a
PB
8724 }
8725}
8726
903c723b
TC
8727/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8728 This builtin will generate code to return the appropriate floating
8729 point classification depending on the value of the floating point
8730 number passed in. The possible return values must be supplied as
8731 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8732 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8733 one floating point argument which is "type generic". */
8734
8735static tree
8736fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8737{
8738 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8739 arg, type, res, tmp;
8740 machine_mode mode;
8741 REAL_VALUE_TYPE r;
8742 char buf[128];
8743
8744 /* Verify the required arguments in the original call. */
8745 if (nargs != 6
8746 || !validate_arg (args[0], INTEGER_TYPE)
8747 || !validate_arg (args[1], INTEGER_TYPE)
8748 || !validate_arg (args[2], INTEGER_TYPE)
8749 || !validate_arg (args[3], INTEGER_TYPE)
8750 || !validate_arg (args[4], INTEGER_TYPE)
8751 || !validate_arg (args[5], REAL_TYPE))
8752 return NULL_TREE;
8753
8754 fp_nan = args[0];
8755 fp_infinite = args[1];
8756 fp_normal = args[2];
8757 fp_subnormal = args[3];
8758 fp_zero = args[4];
8759 arg = args[5];
8760 type = TREE_TYPE (arg);
8761 mode = TYPE_MODE (type);
8762 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8763
8764 /* fpclassify(x) ->
8765 isnan(x) ? FP_NAN :
8766 (fabs(x) == Inf ? FP_INFINITE :
8767 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8768 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8769
8770 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8771 build_real (type, dconst0));
8772 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8773 tmp, fp_zero, fp_subnormal);
8774
8775 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8776 real_from_string (&r, buf);
8777 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8778 arg, build_real (type, r));
8779 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8780
8781 if (HONOR_INFINITIES (mode))
8782 {
8783 real_inf (&r);
8784 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8785 build_real (type, r));
8786 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8787 fp_infinite, res);
8788 }
8789
8790 if (HONOR_NANS (mode))
8791 {
8792 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8793 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8794 }
8795
8796 return res;
8797}
8798
08039bd8 8799/* Fold a call to an unordered comparison function such as
a35da91f 8800 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
5039610b 8801 being called and ARG0 and ARG1 are the arguments for the call.
64a9295a
PB
8802 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8803 the opposite of the desired result. UNORDERED_CODE is used
8804 for modes that can hold NaNs and ORDERED_CODE is used for
8805 the rest. */
08039bd8
RS
8806
8807static tree
db3927fb 8808fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
08039bd8
RS
8809 enum tree_code unordered_code,
8810 enum tree_code ordered_code)
8811{
14f661f1 8812 tree type = TREE_TYPE (TREE_TYPE (fndecl));
08039bd8 8813 enum tree_code code;
1aeaea8d
GK
8814 tree type0, type1;
8815 enum tree_code code0, code1;
8816 tree cmp_type = NULL_TREE;
08039bd8 8817
1aeaea8d
GK
8818 type0 = TREE_TYPE (arg0);
8819 type1 = TREE_TYPE (arg1);
c22cacf3 8820
1aeaea8d
GK
8821 code0 = TREE_CODE (type0);
8822 code1 = TREE_CODE (type1);
c22cacf3 8823
1aeaea8d
GK
8824 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8825 /* Choose the wider of two real types. */
8826 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8827 ? type0 : type1;
8828 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8829 cmp_type = type0;
8830 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8831 cmp_type = type1;
c22cacf3 8832
db3927fb
AH
8833 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8834 arg1 = fold_convert_loc (loc, cmp_type, arg1);
14f661f1
RS
8835
8836 if (unordered_code == UNORDERED_EXPR)
8837 {
1b457aa4 8838 if (!HONOR_NANS (arg0))
db3927fb
AH
8839 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8840 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
14f661f1 8841 }
08039bd8 8842
1b457aa4 8843 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
db3927fb
AH
8844 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8845 fold_build2_loc (loc, code, type, arg0, arg1));
08039bd8
RS
8846}
8847
1304953e
JJ
8848/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8849 arithmetics if it can never overflow, or into internal functions that
8850 return both result of arithmetics and overflowed boolean flag in
44a845ca
MS
8851 a complex integer result, or some other check for overflow.
8852 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8853 checking part of that. */
1304953e
JJ
8854
8855static tree
8856fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8857 tree arg0, tree arg1, tree arg2)
8858{
8859 enum internal_fn ifn = IFN_LAST;
44a845ca
MS
8860 /* The code of the expression corresponding to the type-generic
8861 built-in, or ERROR_MARK for the type-specific ones. */
8862 enum tree_code opcode = ERROR_MARK;
8863 bool ovf_only = false;
8864
1304953e
JJ
8865 switch (fcode)
8866 {
44a845ca
MS
8867 case BUILT_IN_ADD_OVERFLOW_P:
8868 ovf_only = true;
8869 /* FALLTHRU */
1304953e 8870 case BUILT_IN_ADD_OVERFLOW:
44a845ca
MS
8871 opcode = PLUS_EXPR;
8872 /* FALLTHRU */
1304953e
JJ
8873 case BUILT_IN_SADD_OVERFLOW:
8874 case BUILT_IN_SADDL_OVERFLOW:
8875 case BUILT_IN_SADDLL_OVERFLOW:
8876 case BUILT_IN_UADD_OVERFLOW:
8877 case BUILT_IN_UADDL_OVERFLOW:
8878 case BUILT_IN_UADDLL_OVERFLOW:
8879 ifn = IFN_ADD_OVERFLOW;
8880 break;
44a845ca
MS
8881 case BUILT_IN_SUB_OVERFLOW_P:
8882 ovf_only = true;
8883 /* FALLTHRU */
1304953e 8884 case BUILT_IN_SUB_OVERFLOW:
44a845ca
MS
8885 opcode = MINUS_EXPR;
8886 /* FALLTHRU */
1304953e
JJ
8887 case BUILT_IN_SSUB_OVERFLOW:
8888 case BUILT_IN_SSUBL_OVERFLOW:
8889 case BUILT_IN_SSUBLL_OVERFLOW:
8890 case BUILT_IN_USUB_OVERFLOW:
8891 case BUILT_IN_USUBL_OVERFLOW:
8892 case BUILT_IN_USUBLL_OVERFLOW:
8893 ifn = IFN_SUB_OVERFLOW;
8894 break;
44a845ca
MS
8895 case BUILT_IN_MUL_OVERFLOW_P:
8896 ovf_only = true;
8897 /* FALLTHRU */
1304953e 8898 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
8899 opcode = MULT_EXPR;
8900 /* FALLTHRU */
1304953e
JJ
8901 case BUILT_IN_SMUL_OVERFLOW:
8902 case BUILT_IN_SMULL_OVERFLOW:
8903 case BUILT_IN_SMULLL_OVERFLOW:
8904 case BUILT_IN_UMUL_OVERFLOW:
8905 case BUILT_IN_UMULL_OVERFLOW:
8906 case BUILT_IN_UMULLL_OVERFLOW:
8907 ifn = IFN_MUL_OVERFLOW;
8908 break;
8909 default:
8910 gcc_unreachable ();
8911 }
44a845ca
MS
8912
8913 /* For the "generic" overloads, the first two arguments can have different
8914 types and the last argument determines the target type to use to check
8915 for overflow. The arguments of the other overloads all have the same
8916 type. */
8917 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8918
8919 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8920 arguments are constant, attempt to fold the built-in call into a constant
8921 expression indicating whether or not it detected an overflow. */
8922 if (ovf_only
8923 && TREE_CODE (arg0) == INTEGER_CST
8924 && TREE_CODE (arg1) == INTEGER_CST)
8925 /* Perform the computation in the target type and check for overflow. */
8926 return omit_one_operand_loc (loc, boolean_type_node,
8927 arith_overflowed_p (opcode, type, arg0, arg1)
8928 ? boolean_true_node : boolean_false_node,
8929 arg2);
8930
1304953e
JJ
8931 tree ctype = build_complex_type (type);
8932 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8933 2, arg0, arg1);
8934 tree tgt = save_expr (call);
8935 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8936 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8937 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
44a845ca
MS
8938
8939 if (ovf_only)
8940 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8941
8942 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
1304953e
JJ
8943 tree store
8944 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8945 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8946}
8947
b25aad5f
MS
8948/* Fold a call to __builtin_FILE to a constant string. */
8949
8950static inline tree
8951fold_builtin_FILE (location_t loc)
8952{
8953 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
8954 {
8955 /* The documentation says this builtin is equivalent to the preprocessor
8956 __FILE__ macro so it appears appropriate to use the same file prefix
8957 mappings. */
8958 fname = remap_macro_filename (fname);
b25aad5f 8959 return build_string_literal (strlen (fname) + 1, fname);
7365279f 8960 }
b25aad5f
MS
8961
8962 return build_string_literal (1, "");
8963}
8964
8965/* Fold a call to __builtin_FUNCTION to a constant string. */
8966
8967static inline tree
8968fold_builtin_FUNCTION ()
8969{
f76b4224
NS
8970 const char *name = "";
8971
b25aad5f 8972 if (current_function_decl)
f76b4224 8973 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 8974
f76b4224 8975 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
8976}
8977
8978/* Fold a call to __builtin_LINE to an integer constant. */
8979
8980static inline tree
8981fold_builtin_LINE (location_t loc, tree type)
8982{
8983 return build_int_cst (type, LOCATION_LINE (loc));
8984}
8985
5039610b 8986/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 8987 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 8988
6de9cd9a 8989static tree
2625bb5d 8990fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 8991{
c0a47a61 8992 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 8993 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 8994 switch (fcode)
b0b3afb2 8995 {
b25aad5f
MS
8996 case BUILT_IN_FILE:
8997 return fold_builtin_FILE (loc);
8998
8999 case BUILT_IN_FUNCTION:
9000 return fold_builtin_FUNCTION ();
9001
9002 case BUILT_IN_LINE:
9003 return fold_builtin_LINE (loc, type);
9004
5039610b 9005 CASE_FLT_FN (BUILT_IN_INF):
6dc198e3 9006 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
5039610b
SL
9007 case BUILT_IN_INFD32:
9008 case BUILT_IN_INFD64:
9009 case BUILT_IN_INFD128:
db3927fb 9010 return fold_builtin_inf (loc, type, true);
d3147f64 9011
5039610b 9012 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
6dc198e3 9013 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
db3927fb 9014 return fold_builtin_inf (loc, type, false);
d3147f64 9015
5039610b
SL
9016 case BUILT_IN_CLASSIFY_TYPE:
9017 return fold_builtin_classify_type (NULL_TREE);
d3147f64 9018
5039610b
SL
9019 default:
9020 break;
9021 }
9022 return NULL_TREE;
9023}
d3147f64 9024
5039610b 9025/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
2625bb5d 9026 This function returns NULL_TREE if no simplification was possible. */
d3147f64 9027
5039610b 9028static tree
2625bb5d 9029fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
5039610b
SL
9030{
9031 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9032 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9033
9034 if (TREE_CODE (arg0) == ERROR_MARK)
9035 return NULL_TREE;
9036
d7ebef06 9037 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
5c1a2e63
RS
9038 return ret;
9039
5039610b
SL
9040 switch (fcode)
9041 {
b0b3afb2 9042 case BUILT_IN_CONSTANT_P:
d3147f64 9043 {
5039610b 9044 tree val = fold_builtin_constant_p (arg0);
d3147f64 9045
d3147f64
EC
9046 /* Gimplification will pull the CALL_EXPR for the builtin out of
9047 an if condition. When not optimizing, we'll not CSE it back.
9048 To avoid link error types of regressions, return false now. */
9049 if (!val && !optimize)
9050 val = integer_zero_node;
9051
9052 return val;
9053 }
b0b3afb2 9054
ad82abb8 9055 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9056 return fold_builtin_classify_type (arg0);
ad82abb8 9057
b0b3afb2 9058 case BUILT_IN_STRLEN:
ab996409 9059 return fold_builtin_strlen (loc, type, arg0);
b0b3afb2 9060
ea6a6627 9061 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 9062 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
9063 case BUILT_IN_FABSD32:
9064 case BUILT_IN_FABSD64:
9065 case BUILT_IN_FABSD128:
db3927fb 9066 return fold_builtin_fabs (loc, arg0, type);
9655d83b
RS
9067
9068 case BUILT_IN_ABS:
9069 case BUILT_IN_LABS:
9070 case BUILT_IN_LLABS:
9071 case BUILT_IN_IMAXABS:
db3927fb 9072 return fold_builtin_abs (loc, arg0, type);
07bae5ad 9073
ea6a6627 9074 CASE_FLT_FN (BUILT_IN_CONJ):
c128599a 9075 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9076 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9077 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
5039610b 9078 break;
aa6c7c3a 9079
ea6a6627 9080 CASE_FLT_FN (BUILT_IN_CREAL):
c128599a 9081 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9082 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
6f3d1a5e 9083 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
5039610b 9084 break;
aa6c7c3a 9085
ea6a6627 9086 CASE_FLT_FN (BUILT_IN_CIMAG):
376da68e
KG
9087 if (validate_arg (arg0, COMPLEX_TYPE)
9088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9089 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
5039610b 9090 break;
aa6c7c3a 9091
5c1a2e63
RS
9092 CASE_FLT_FN (BUILT_IN_CARG):
9093 return fold_builtin_carg (loc, arg0, type);
43272bf5 9094
5c1a2e63
RS
9095 case BUILT_IN_ISASCII:
9096 return fold_builtin_isascii (loc, arg0);
b8698a0f 9097
5c1a2e63
RS
9098 case BUILT_IN_TOASCII:
9099 return fold_builtin_toascii (loc, arg0);
b8698a0f 9100
5c1a2e63
RS
9101 case BUILT_IN_ISDIGIT:
9102 return fold_builtin_isdigit (loc, arg0);
b8698a0f 9103
903c723b
TC
9104 CASE_FLT_FN (BUILT_IN_FINITE):
9105 case BUILT_IN_FINITED32:
9106 case BUILT_IN_FINITED64:
9107 case BUILT_IN_FINITED128:
9108 case BUILT_IN_ISFINITE:
9109 {
9110 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9111 if (ret)
9112 return ret;
9113 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9114 }
9115
9116 CASE_FLT_FN (BUILT_IN_ISINF):
9117 case BUILT_IN_ISINFD32:
9118 case BUILT_IN_ISINFD64:
9119 case BUILT_IN_ISINFD128:
9120 {
9121 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9122 if (ret)
9123 return ret;
9124 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9125 }
9126
9127 case BUILT_IN_ISNORMAL:
9128 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9129
5c1a2e63 9130 case BUILT_IN_ISINF_SIGN:
903c723b
TC
9131 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9132
9133 CASE_FLT_FN (BUILT_IN_ISNAN):
9134 case BUILT_IN_ISNAND32:
9135 case BUILT_IN_ISNAND64:
9136 case BUILT_IN_ISNAND128:
9137 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
b8698a0f 9138
5c1a2e63
RS
9139 case BUILT_IN_FREE:
9140 if (integer_zerop (arg0))
9141 return build_empty_stmt (loc);
abcc43f5 9142 break;
07bae5ad 9143
5c1a2e63 9144 default:
4835c978 9145 break;
5c1a2e63 9146 }
4977bab6 9147
5c1a2e63 9148 return NULL_TREE;
e19f6bde 9149
5c1a2e63 9150}
b53fed56 9151
5c1a2e63
RS
9152/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9153 This function returns NULL_TREE if no simplification was possible. */
5039610b
SL
9154
9155static tree
2625bb5d 9156fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
5039610b
SL
9157{
9158 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9159 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9160
5c1a2e63
RS
9161 if (TREE_CODE (arg0) == ERROR_MARK
9162 || TREE_CODE (arg1) == ERROR_MARK)
9163 return NULL_TREE;
ea91f957 9164
d7ebef06 9165 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
5c1a2e63 9166 return ret;
752b7d38 9167
5c1a2e63
RS
9168 switch (fcode)
9169 {
752b7d38
KG
9170 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9171 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9172 if (validate_arg (arg0, REAL_TYPE)
c3284718 9173 && validate_arg (arg1, POINTER_TYPE))
752b7d38
KG
9174 return do_mpfr_lgamma_r (arg0, arg1, type);
9175 break;
5039610b 9176
7a2a25ab 9177 CASE_FLT_FN (BUILT_IN_FREXP):
db3927fb 9178 return fold_builtin_frexp (loc, arg0, arg1, type);
7a2a25ab 9179
3d577eaf 9180 CASE_FLT_FN (BUILT_IN_MODF):
db3927fb 9181 return fold_builtin_modf (loc, arg0, arg1, type);
3d577eaf 9182
5039610b 9183 case BUILT_IN_STRSPN:
db3927fb 9184 return fold_builtin_strspn (loc, arg0, arg1);
5039610b
SL
9185
9186 case BUILT_IN_STRCSPN:
db3927fb 9187 return fold_builtin_strcspn (loc, arg0, arg1);
5039610b 9188
5039610b 9189 case BUILT_IN_STRPBRK:
db3927fb 9190 return fold_builtin_strpbrk (loc, arg0, arg1, type);
5039610b
SL
9191
9192 case BUILT_IN_EXPECT:
ed9c79e1 9193 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
5039610b 9194
08039bd8 9195 case BUILT_IN_ISGREATER:
db3927fb
AH
9196 return fold_builtin_unordered_cmp (loc, fndecl,
9197 arg0, arg1, UNLE_EXPR, LE_EXPR);
08039bd8 9198 case BUILT_IN_ISGREATEREQUAL:
db3927fb
AH
9199 return fold_builtin_unordered_cmp (loc, fndecl,
9200 arg0, arg1, UNLT_EXPR, LT_EXPR);
08039bd8 9201 case BUILT_IN_ISLESS:
db3927fb
AH
9202 return fold_builtin_unordered_cmp (loc, fndecl,
9203 arg0, arg1, UNGE_EXPR, GE_EXPR);
08039bd8 9204 case BUILT_IN_ISLESSEQUAL:
db3927fb
AH
9205 return fold_builtin_unordered_cmp (loc, fndecl,
9206 arg0, arg1, UNGT_EXPR, GT_EXPR);
08039bd8 9207 case BUILT_IN_ISLESSGREATER:
db3927fb
AH
9208 return fold_builtin_unordered_cmp (loc, fndecl,
9209 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
08039bd8 9210 case BUILT_IN_ISUNORDERED:
db3927fb
AH
9211 return fold_builtin_unordered_cmp (loc, fndecl,
9212 arg0, arg1, UNORDERED_EXPR,
a35da91f 9213 NOP_EXPR);
08039bd8 9214
d3147f64
EC
9215 /* We do the folding for va_start in the expander. */
9216 case BUILT_IN_VA_START:
9217 break;
a32e70c3 9218
10a0d495 9219 case BUILT_IN_OBJECT_SIZE:
5039610b 9220 return fold_builtin_object_size (arg0, arg1);
10a0d495 9221
86951993
AM
9222 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9223 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9224
9225 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9226 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9227
5039610b
SL
9228 default:
9229 break;
9230 }
9231 return NULL_TREE;
9232}
9233
9234/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
2625bb5d 9235 and ARG2.
5039610b
SL
9236 This function returns NULL_TREE if no simplification was possible. */
9237
9238static tree
db3927fb 9239fold_builtin_3 (location_t loc, tree fndecl,
2625bb5d 9240 tree arg0, tree arg1, tree arg2)
5039610b
SL
9241{
9242 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9243 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9244
9245 if (TREE_CODE (arg0) == ERROR_MARK
9246 || TREE_CODE (arg1) == ERROR_MARK
9247 || TREE_CODE (arg2) == ERROR_MARK)
9248 return NULL_TREE;
9249
d7ebef06
RS
9250 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9251 arg0, arg1, arg2))
5c1a2e63
RS
9252 return ret;
9253
5039610b
SL
9254 switch (fcode)
9255 {
9256
9257 CASE_FLT_FN (BUILT_IN_SINCOS):
db3927fb 9258 return fold_builtin_sincos (loc, arg0, arg1, arg2);
5039610b
SL
9259
9260 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 9261 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
16949072 9262 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
5039610b 9263
ea91f957
KG
9264 CASE_FLT_FN (BUILT_IN_REMQUO):
9265 if (validate_arg (arg0, REAL_TYPE)
c3284718
RS
9266 && validate_arg (arg1, REAL_TYPE)
9267 && validate_arg (arg2, POINTER_TYPE))
ea91f957
KG
9268 return do_mpfr_remquo (arg0, arg1, arg2);
9269 break;
ea91f957 9270
5039610b 9271 case BUILT_IN_MEMCMP:
5de73c05 9272 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
5039610b 9273
ed9c79e1
JJ
9274 case BUILT_IN_EXPECT:
9275 return fold_builtin_expect (loc, arg0, arg1, arg2);
9276
1304953e
JJ
9277 case BUILT_IN_ADD_OVERFLOW:
9278 case BUILT_IN_SUB_OVERFLOW:
9279 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
9280 case BUILT_IN_ADD_OVERFLOW_P:
9281 case BUILT_IN_SUB_OVERFLOW_P:
9282 case BUILT_IN_MUL_OVERFLOW_P:
1304953e
JJ
9283 case BUILT_IN_SADD_OVERFLOW:
9284 case BUILT_IN_SADDL_OVERFLOW:
9285 case BUILT_IN_SADDLL_OVERFLOW:
9286 case BUILT_IN_SSUB_OVERFLOW:
9287 case BUILT_IN_SSUBL_OVERFLOW:
9288 case BUILT_IN_SSUBLL_OVERFLOW:
9289 case BUILT_IN_SMUL_OVERFLOW:
9290 case BUILT_IN_SMULL_OVERFLOW:
9291 case BUILT_IN_SMULLL_OVERFLOW:
9292 case BUILT_IN_UADD_OVERFLOW:
9293 case BUILT_IN_UADDL_OVERFLOW:
9294 case BUILT_IN_UADDLL_OVERFLOW:
9295 case BUILT_IN_USUB_OVERFLOW:
9296 case BUILT_IN_USUBL_OVERFLOW:
9297 case BUILT_IN_USUBLL_OVERFLOW:
9298 case BUILT_IN_UMUL_OVERFLOW:
9299 case BUILT_IN_UMULL_OVERFLOW:
9300 case BUILT_IN_UMULLL_OVERFLOW:
9301 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9302
b0b3afb2
BS
9303 default:
9304 break;
9305 }
5039610b
SL
9306 return NULL_TREE;
9307}
b0b3afb2 9308
5039610b 9309/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
a6a0570f
RB
9310 arguments. IGNORE is true if the result of the
9311 function call is ignored. This function returns NULL_TREE if no
9312 simplification was possible. */
b8698a0f 9313
3d2cf79f 9314tree
2625bb5d 9315fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
5039610b
SL
9316{
9317 tree ret = NULL_TREE;
f4577fcd 9318
5039610b
SL
9319 switch (nargs)
9320 {
9321 case 0:
2625bb5d 9322 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
9323 break;
9324 case 1:
2625bb5d 9325 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
9326 break;
9327 case 2:
2625bb5d 9328 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
5039610b
SL
9329 break;
9330 case 3:
2625bb5d 9331 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 9332 break;
5039610b 9333 default:
903c723b 9334 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
9335 break;
9336 }
9337 if (ret)
9338 {
726a989a 9339 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 9340 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
9341 TREE_NO_WARNING (ret) = 1;
9342 return ret;
9343 }
9344 return NULL_TREE;
9345}
9346
862d0b35
DN
9347/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9348 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9349 of arguments in ARGS to be omitted. OLDNARGS is the number of
9350 elements in ARGS. */
5039610b
SL
9351
9352static tree
862d0b35
DN
9353rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9354 int skip, tree fndecl, int n, va_list newargs)
5039610b 9355{
862d0b35
DN
9356 int nargs = oldnargs - skip + n;
9357 tree *buffer;
5039610b 9358
862d0b35 9359 if (n > 0)
5039610b 9360 {
862d0b35 9361 int i, j;
5039610b 9362
862d0b35
DN
9363 buffer = XALLOCAVEC (tree, nargs);
9364 for (i = 0; i < n; i++)
9365 buffer[i] = va_arg (newargs, tree);
9366 for (j = skip; j < oldnargs; j++, i++)
9367 buffer[i] = args[j];
9368 }
9369 else
9370 buffer = args + skip;
3bf5906b 9371
862d0b35
DN
9372 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9373}
5039610b 9374
0889e9bc
JJ
9375/* Return true if FNDECL shouldn't be folded right now.
9376 If a built-in function has an inline attribute always_inline
9377 wrapper, defer folding it after always_inline functions have
9378 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9379 might not be performed. */
9380
e7f9dae0 9381bool
0889e9bc
JJ
9382avoid_folding_inline_builtin (tree fndecl)
9383{
9384 return (DECL_DECLARED_INLINE_P (fndecl)
9385 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9386 && cfun
9387 && !cfun->always_inline_functions_inlined
9388 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9389}
9390
6de9cd9a 9391/* A wrapper function for builtin folding that prevents warnings for
caf93cb0 9392 "statement without effect" and the like, caused by removing the
6de9cd9a
DN
9393 call node earlier than the warning is generated. */
9394
9395tree
db3927fb 9396fold_call_expr (location_t loc, tree exp, bool ignore)
6de9cd9a 9397{
5039610b
SL
9398 tree ret = NULL_TREE;
9399 tree fndecl = get_callee_fndecl (exp);
9400 if (fndecl
9401 && TREE_CODE (fndecl) == FUNCTION_DECL
6ef5231b
JJ
9402 && DECL_BUILT_IN (fndecl)
9403 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9404 yet. Defer folding until we see all the arguments
9405 (after inlining). */
9406 && !CALL_EXPR_VA_ARG_PACK (exp))
9407 {
9408 int nargs = call_expr_nargs (exp);
9409
9410 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9411 instead last argument is __builtin_va_arg_pack (). Defer folding
9412 even in that case, until arguments are finalized. */
9413 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9414 {
9415 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9416 if (fndecl2
9417 && TREE_CODE (fndecl2) == FUNCTION_DECL
9418 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9419 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9420 return NULL_TREE;
9421 }
9422
0889e9bc
JJ
9423 if (avoid_folding_inline_builtin (fndecl))
9424 return NULL_TREE;
9425
5039610b 9426 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
f311c3b4
NF
9427 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9428 CALL_EXPR_ARGP (exp), ignore);
5039610b
SL
9429 else
9430 {
a6a0570f
RB
9431 tree *args = CALL_EXPR_ARGP (exp);
9432 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
5039610b 9433 if (ret)
db3927fb 9434 return ret;
5039610b 9435 }
6de9cd9a 9436 }
5039610b
SL
9437 return NULL_TREE;
9438}
b8698a0f 9439
a6a0570f
RB
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through an ADDR_EXPR of a FUNCTION_DECL that is
     a recognized builtin are candidates for folding.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      /* Some builtins must be preserved for the inliner's benefit.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* Machine-specific builtins are folded by the target hook;
	 everything else goes through the generic folder.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9478
43ea30dc
NF
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  /* Thin wrapper: package the variadic arguments into a va_list and
     delegate all the real work to rewrite_call_expr_valist.  */
  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
9497
9498/* Validate a single argument ARG against a tree code CODE representing
0dba7960 9499 a type. Return true when argument is valid. */
b8698a0f 9500
5039610b 9501static bool
0dba7960 9502validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
9503{
9504 if (!arg)
9505 return false;
9506 else if (code == POINTER_TYPE)
0dba7960 9507 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
9508 else if (code == INTEGER_TYPE)
9509 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 9510 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 9511}
019fa094 9512
726a989a
RB
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* enum tree_code is promoted to int when passed through "...",
	 so it must be read back as int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
9567
f6155fda
SS
/* Default target-specific builtin expander that does nothing.  Targets
   that do not override the expand_builtin hook get this implementation;
   returning NULL_RTX tells the caller to fall back to a normal call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
34ee7f82 9579
7dc61d6c
KG
9580/* Returns true is EXP represents data that would potentially reside
9581 in a readonly section. */
9582
fef5a0d9 9583bool
7dc61d6c
KG
9584readonly_data_expr (tree exp)
9585{
9586 STRIP_NOPS (exp);
9587
aef0afc4
UW
9588 if (TREE_CODE (exp) != ADDR_EXPR)
9589 return false;
9590
9591 exp = get_base_address (TREE_OPERAND (exp, 0));
9592 if (!exp)
9593 return false;
9594
9595 /* Make sure we call decl_readonly_section only for trees it
9596 can handle (since it returns true for everything it doesn't
9597 understand). */
caf93cb0 9598 if (TREE_CODE (exp) == STRING_CST
aef0afc4 9599 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 9600 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 9601 return decl_readonly_section (exp, 0);
7dc61d6c
KG
9602 else
9603 return false;
9604}
6de9cd9a 9605
5039610b
SL
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a known constant string, otherwise no
	 folding is possible.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings are constant: evaluate strpbrk at compile time
	     using the host's implementation.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
9671
5039610b
SL
9672/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9673 to the call.
6de9cd9a 9674
5039610b 9675 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9676 simplified form of the call as a tree.
9677
9678 The simplified form may be a constant or other expression which
9679 computes the same value, but in a more efficient manner (including
9680 calls to other builtin functions).
9681
9682 The call may contain arguments which need to be evaluated, but
9683 which are not useful to determine the result of the call. In
9684 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9685 COMPOUND_EXPR will be an argument which must be evaluated.
9686 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9687 COMPOUND_EXPR in the chain will contain the tree for the simplified
9688 form of the builtin function call. */
9689
9690static tree
db3927fb 9691fold_builtin_strspn (location_t loc, tree s1, tree s2)
6de9cd9a 9692{
5039610b
SL
9693 if (!validate_arg (s1, POINTER_TYPE)
9694 || !validate_arg (s2, POINTER_TYPE))
9695 return NULL_TREE;
6de9cd9a
DN
9696 else
9697 {
6de9cd9a
DN
9698 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9699
5039610b 9700 /* If either argument is "", return NULL_TREE. */
6de9cd9a 9701 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
08039bd8
RS
9702 /* Evaluate and ignore both arguments in case either one has
9703 side-effects. */
db3927fb 9704 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 9705 s1, s2);
5039610b 9706 return NULL_TREE;
6de9cd9a
DN
9707 }
9708}
9709
5039610b
SL
9710/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9711 to the call.
6de9cd9a 9712
5039610b 9713 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9714 simplified form of the call as a tree.
9715
9716 The simplified form may be a constant or other expression which
9717 computes the same value, but in a more efficient manner (including
9718 calls to other builtin functions).
9719
9720 The call may contain arguments which need to be evaluated, but
9721 which are not useful to determine the result of the call. In
9722 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9723 COMPOUND_EXPR will be an argument which must be evaluated.
9724 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9725 COMPOUND_EXPR in the chain will contain the tree for the simplified
9726 form of the builtin function call. */
9727
9728static tree
db3927fb 9729fold_builtin_strcspn (location_t loc, tree s1, tree s2)
6de9cd9a 9730{
5039610b
SL
9731 if (!validate_arg (s1, POINTER_TYPE)
9732 || !validate_arg (s2, POINTER_TYPE))
9733 return NULL_TREE;
6de9cd9a
DN
9734 else
9735 {
5039610b 9736 /* If the first argument is "", return NULL_TREE. */
df838ef0 9737 const char *p1 = c_getstr (s1);
6de9cd9a
DN
9738 if (p1 && *p1 == '\0')
9739 {
9740 /* Evaluate and ignore argument s2 in case it has
9741 side-effects. */
db3927fb 9742 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 9743 size_zero_node, s2);
6de9cd9a
DN
9744 }
9745
9746 /* If the second argument is "", return __builtin_strlen(s1). */
df838ef0 9747 const char *p2 = c_getstr (s2);
6de9cd9a
DN
9748 if (p2 && *p2 == '\0')
9749 {
e79983f4 9750 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a
DN
9751
9752 /* If the replacement _DECL isn't initialized, don't do the
9753 transformation. */
9754 if (!fn)
5039610b 9755 return NULL_TREE;
6de9cd9a 9756
db3927fb 9757 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 9758 }
5039610b 9759 return NULL_TREE;
6de9cd9a
DN
9760 }
9761}
9762
5039610b 9763/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
9764 produced. False otherwise. This is done so that we don't output the error
9765 or warning twice or three times. */
726a989a 9766
2efcfa4e 9767bool
5039610b 9768fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
9769{
9770 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
9771 int nargs = call_expr_nargs (exp);
9772 tree arg;
34c88790
DS
9773 /* There is good chance the current input_location points inside the
9774 definition of the va_start macro (perhaps on the token for
9775 builtin) in a system header, so warnings will not be emitted.
9776 Use the location in real source code. */
9777 source_location current_location =
9778 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9779 NULL);
6de9cd9a 9780
f38958e8 9781 if (!stdarg_p (fntype))
2efcfa4e
AP
9782 {
9783 error ("%<va_start%> used in function with fixed args");
9784 return true;
9785 }
5039610b
SL
9786
9787 if (va_start_p)
8870e212 9788 {
5039610b
SL
9789 if (va_start_p && (nargs != 2))
9790 {
9791 error ("wrong number of arguments to function %<va_start%>");
9792 return true;
9793 }
9794 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
9795 }
9796 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9797 when we checked the arguments and if needed issued a warning. */
5039610b 9798 else
6de9cd9a 9799 {
5039610b
SL
9800 if (nargs == 0)
9801 {
9802 /* Evidently an out of date version of <stdarg.h>; can't validate
9803 va_start's second argument, but can still work as intended. */
34c88790 9804 warning_at (current_location,
b9c8da34
DS
9805 OPT_Wvarargs,
9806 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
9807 return true;
9808 }
9809 else if (nargs > 1)
c22cacf3 9810 {
5039610b 9811 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
9812 return true;
9813 }
5039610b
SL
9814 arg = CALL_EXPR_ARG (exp, 0);
9815 }
9816
4e3825db
MM
9817 if (TREE_CODE (arg) == SSA_NAME)
9818 arg = SSA_NAME_VAR (arg);
9819
5039610b 9820 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 9821 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
9822 the arguments and if needed issuing a warning. */
9823 if (!integer_zerop (arg))
9824 {
9825 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 9826
6de9cd9a
DN
9827 /* Strip off all nops for the sake of the comparison. This
9828 is not quite the same as STRIP_NOPS. It does more.
9829 We must also strip off INDIRECT_EXPR for C++ reference
9830 parameters. */
1043771b 9831 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
9832 || TREE_CODE (arg) == INDIRECT_REF)
9833 arg = TREE_OPERAND (arg, 0);
9834 if (arg != last_parm)
c22cacf3 9835 {
118f3b19
KH
9836 /* FIXME: Sometimes with the tree optimizers we can get the
9837 not the last argument even though the user used the last
9838 argument. We just warn and set the arg to be the last
9839 argument so that we will get wrong-code because of
9840 it. */
34c88790 9841 warning_at (current_location,
b9c8da34 9842 OPT_Wvarargs,
34c88790 9843 "second parameter of %<va_start%> not last named argument");
2efcfa4e 9844 }
2985f531
MLI
9845
9846 /* Undefined by C99 7.15.1.4p4 (va_start):
9847 "If the parameter parmN is declared with the register storage
9848 class, with a function or array type, or with a type that is
9849 not compatible with the type that results after application of
9850 the default argument promotions, the behavior is undefined."
9851 */
9852 else if (DECL_REGISTER (arg))
34c88790
DS
9853 {
9854 warning_at (current_location,
b9c8da34 9855 OPT_Wvarargs,
9c582551 9856 "undefined behavior when second parameter of "
34c88790
DS
9857 "%<va_start%> is declared with %<register%> storage");
9858 }
2985f531 9859
8870e212 9860 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
9861 optimizers are run and then avoid keeping it in the tree,
9862 as otherwise we could warn even for correct code like:
9863 void foo (int i, ...)
9864 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
9865 if (va_start_p)
9866 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9867 else
9868 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
9869 }
9870 return false;
6de9cd9a
DN
9871}
9872
9873
5039610b 9874/* Expand a call EXP to __builtin_object_size. */
10a0d495 9875
9b2b7279 9876static rtx
10a0d495
JJ
9877expand_builtin_object_size (tree exp)
9878{
9879 tree ost;
9880 int object_size_type;
9881 tree fndecl = get_callee_fndecl (exp);
10a0d495 9882
5039610b 9883 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10a0d495 9884 {
0f2c4a8f 9885 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
c94ed7a1 9886 exp, fndecl);
10a0d495
JJ
9887 expand_builtin_trap ();
9888 return const0_rtx;
9889 }
9890
5039610b 9891 ost = CALL_EXPR_ARG (exp, 1);
10a0d495
JJ
9892 STRIP_NOPS (ost);
9893
9894 if (TREE_CODE (ost) != INTEGER_CST
9895 || tree_int_cst_sgn (ost) < 0
9896 || compare_tree_int (ost, 3) > 0)
9897 {
0f2c4a8f 9898 error ("%Klast argument of %qD is not integer constant between 0 and 3",
c94ed7a1 9899 exp, fndecl);
10a0d495
JJ
9900 expand_builtin_trap ();
9901 return const0_rtx;
9902 }
9903
9439e9a1 9904 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
9905
9906 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9907}
9908
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* The second argument is an integer for __memset_chk (the fill byte)
     and a pointer for the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses; SIZES_OK records whether the
     access was fully validated.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Drop the object-size argument and expand as the unchecked
	 variant, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10025
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call to one of the string/memory __*_chk builtins and
   FCODE identifies which one; the relevant arguments are picked out
   of EXP per builtin and handed to the generic access checker.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
10098
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  The output length is only
   computable when the format contains no conversions, or (for
   __sprintf_chk) is exactly "%s" with a known-length string argument;
   otherwise no checking is done.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* A size of (size_t) -1 means "unknown"; nothing to check then.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
10156
f9555f40
JJ
10157/* Emit warning if a free is called with address of a variable. */
10158
10159static void
10160maybe_emit_free_warning (tree exp)
10161{
10162 tree arg = CALL_EXPR_ARG (exp, 0);
10163
10164 STRIP_NOPS (arg);
10165 if (TREE_CODE (arg) != ADDR_EXPR)
10166 return;
10167
10168 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 10169 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
10170 return;
10171
10172 if (SSA_VAR_P (arg))
a3a704a4
MH
10173 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10174 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 10175 else
a3a704a4
MH
10176 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10177 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
10178}
10179
5039610b
SL
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a literal 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
10226
903c723b
TC
10227/* Builtins with folding operations that operate on "..." arguments
10228 need special handling; we need to store the arguments in a convenient
10229 data structure before attempting any folding. Fortunately there are
10230 only a few builtins that fall into this category. FNDECL is the
10231 function, EXP is the CALL_EXPR for the call. */
10232
10233static tree
10234fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10235{
10236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10237 tree ret = NULL_TREE;
10238
10239 switch (fcode)
10240 {
10241 case BUILT_IN_FPCLASSIFY:
10242 ret = fold_builtin_fpclassify (loc, args, nargs);
10243 break;
10244
10245 default:
10246 break;
10247 }
10248 if (ret)
10249 {
10250 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10251 SET_EXPR_LOCATION (ret, loc);
10252 TREE_NO_WARNING (ret) = 1;
10253 return ret;
10254 }
10255 return NULL_TREE;
10256}
10257
000ba23d
KG
10258/* Initialize format string characters in the target charset. */
10259
fef5a0d9 10260bool
000ba23d
KG
10261init_target_chars (void)
10262{
10263 static bool init;
10264 if (!init)
10265 {
10266 target_newline = lang_hooks.to_target_charset ('\n');
10267 target_percent = lang_hooks.to_target_charset ('%');
10268 target_c = lang_hooks.to_target_charset ('c');
10269 target_s = lang_hooks.to_target_charset ('s');
10270 if (target_newline == 0 || target_percent == 0 || target_c == 0
10271 || target_s == 0)
10272 return false;
10273
10274 target_percent_c[0] = target_percent;
10275 target_percent_c[1] = target_c;
10276 target_percent_c[2] = '\0';
10277
10278 target_percent_s[0] = target_percent;
10279 target_percent_s[1] = target_s;
10280 target_percent_s[2] = '\0';
10281
10282 target_percent_s_newline[0] = target_percent;
10283 target_percent_s_newline[1] = target_s;
10284 target_percent_s_newline[2] = target_newline;
10285 target_percent_s_newline[3] = '\0';
c22cacf3 10286
000ba23d
KG
10287 init = true;
10288 }
10289 return true;
10290}
1f3f1f68 10291
4413d881
KG
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10328
c128599a
KG
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert real and imaginary parts separately; TYPE is the
	 complex type, so each part uses TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
c128599a 10375
ea91f957
KG
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when folding is not
   possible.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  /* Work at exactly the target precision so results match the
	     target arithmetic.  */
	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear the flags first; do_mpfr_ckconv() inspects them to
	     detect overflow/underflow during the computation.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
	        {
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
752b7d38
KG
10449
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the flags first; do_mpfr_ckconv() inspects them to
	     detect overflow/underflow during the computation.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 10514
a41d064d
KG
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Returns NULL_TREE when folding is
   not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
        {
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear the flags first; do_mpc_ckconv() inspects them to
	     detect overflow/underflow during the computation.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 10573
726a989a
RB
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the call
   statement; IGNORE is true if the call's value is not used.  Returns
   the folded replacement expression or NULL_TREE.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* Calls with __builtin_va_arg_pack () must stay as calls.  */
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-specific builtins fold via the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through a wrapping NOP_EXPR to tag the real
		     expression underneath.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
d7f09764 10625
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  /* ffs on a sub-word int may be expanded through a libcall; make sure
     the libfunc entry uses the user-provided assembler name too.  */
  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
bec922f0
SL
10648
10649/* Return true if DECL is a builtin that expands to a constant or similarly
10650 simple code. */
10651bool
10652is_simple_builtin (tree decl)
10653{
10654 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10655 switch (DECL_FUNCTION_CODE (decl))
10656 {
10657 /* Builtins that expand to constants. */
10658 case BUILT_IN_CONSTANT_P:
10659 case BUILT_IN_EXPECT:
10660 case BUILT_IN_OBJECT_SIZE:
10661 case BUILT_IN_UNREACHABLE:
10662 /* Simple register moves or loads from stack. */
45d439ac 10663 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
10664 case BUILT_IN_RETURN_ADDRESS:
10665 case BUILT_IN_EXTRACT_RETURN_ADDR:
10666 case BUILT_IN_FROB_RETURN_ADDR:
10667 case BUILT_IN_RETURN:
10668 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10669 case BUILT_IN_FRAME_ADDRESS:
10670 case BUILT_IN_VA_END:
10671 case BUILT_IN_STACK_SAVE:
10672 case BUILT_IN_STACK_RESTORE:
10673 /* Exception state returns or moves registers around. */
10674 case BUILT_IN_EH_FILTER:
10675 case BUILT_IN_EH_POINTER:
10676 case BUILT_IN_EH_COPY_VALUES:
10677 return true;
10678
10679 default:
10680 return false;
10681 }
10682
10683 return false;
10684}
10685
10686/* Return true if DECL is a builtin that is not expensive, i.e., they are
10687 most probably expanded inline into reasonably simple code. This is a
10688 superset of is_simple_builtin. */
10689bool
10690is_inexpensive_builtin (tree decl)
10691{
10692 if (!decl)
10693 return false;
10694 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10695 return true;
10696 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10697 switch (DECL_FUNCTION_CODE (decl))
10698 {
10699 case BUILT_IN_ABS:
9e878cf1 10700 CASE_BUILT_IN_ALLOCA:
ac868f29 10701 case BUILT_IN_BSWAP16:
bec922f0
SL
10702 case BUILT_IN_BSWAP32:
10703 case BUILT_IN_BSWAP64:
10704 case BUILT_IN_CLZ:
10705 case BUILT_IN_CLZIMAX:
10706 case BUILT_IN_CLZL:
10707 case BUILT_IN_CLZLL:
10708 case BUILT_IN_CTZ:
10709 case BUILT_IN_CTZIMAX:
10710 case BUILT_IN_CTZL:
10711 case BUILT_IN_CTZLL:
10712 case BUILT_IN_FFS:
10713 case BUILT_IN_FFSIMAX:
10714 case BUILT_IN_FFSL:
10715 case BUILT_IN_FFSLL:
10716 case BUILT_IN_IMAXABS:
10717 case BUILT_IN_FINITE:
10718 case BUILT_IN_FINITEF:
10719 case BUILT_IN_FINITEL:
10720 case BUILT_IN_FINITED32:
10721 case BUILT_IN_FINITED64:
10722 case BUILT_IN_FINITED128:
10723 case BUILT_IN_FPCLASSIFY:
10724 case BUILT_IN_ISFINITE:
10725 case BUILT_IN_ISINF_SIGN:
10726 case BUILT_IN_ISINF:
10727 case BUILT_IN_ISINFF:
10728 case BUILT_IN_ISINFL:
10729 case BUILT_IN_ISINFD32:
10730 case BUILT_IN_ISINFD64:
10731 case BUILT_IN_ISINFD128:
10732 case BUILT_IN_ISNAN:
10733 case BUILT_IN_ISNANF:
10734 case BUILT_IN_ISNANL:
10735 case BUILT_IN_ISNAND32:
10736 case BUILT_IN_ISNAND64:
10737 case BUILT_IN_ISNAND128:
10738 case BUILT_IN_ISNORMAL:
10739 case BUILT_IN_ISGREATER:
10740 case BUILT_IN_ISGREATEREQUAL:
10741 case BUILT_IN_ISLESS:
10742 case BUILT_IN_ISLESSEQUAL:
10743 case BUILT_IN_ISLESSGREATER:
10744 case BUILT_IN_ISUNORDERED:
10745 case BUILT_IN_VA_ARG_PACK:
10746 case BUILT_IN_VA_ARG_PACK_LEN:
10747 case BUILT_IN_VA_COPY:
10748 case BUILT_IN_TRAP:
10749 case BUILT_IN_SAVEREGS:
10750 case BUILT_IN_POPCOUNTL:
10751 case BUILT_IN_POPCOUNTLL:
10752 case BUILT_IN_POPCOUNTIMAX:
10753 case BUILT_IN_POPCOUNT:
10754 case BUILT_IN_PARITYL:
10755 case BUILT_IN_PARITYLL:
10756 case BUILT_IN_PARITYIMAX:
10757 case BUILT_IN_PARITY:
10758 case BUILT_IN_LABS:
10759 case BUILT_IN_LLABS:
10760 case BUILT_IN_PREFETCH:
41dbbb37 10761 case BUILT_IN_ACC_ON_DEVICE:
bec922f0
SL
10762 return true;
10763
10764 default:
10765 return is_simple_builtin (decl);
10766 }
10767
10768 return false;
10769}
488c6247
ML
10770
10771/* Return true if T is a constant and the value cast to a target char
10772 can be represented by a host char.
10773 Store the casted char constant in *P if so. */
10774
10775bool
10776target_char_cst_p (tree t, char *p)
10777{
10778 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10779 return false;
10780
10781 *p = (char)tree_to_uhwi (t);
10782 return true;
10783}
cc8bea0a
MS
10784
/* Return the maximum object size, currently the largest positive
   value of ptrdiff_t.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}