]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
Daily bump.
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
28f4ec01 1/* Expand builtin functions.
85ec4feb 2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5 33#include "predict.h"
b2272b13 34#include "params.h"
957060b5
AM
35#include "tm_p.h"
36#include "stringpool.h"
f90aa46c 37#include "tree-vrp.h"
957060b5
AM
38#include "tree-ssanames.h"
39#include "expmed.h"
40#include "optabs.h"
957060b5
AM
41#include "emit-rtl.h"
42#include "recog.h"
957060b5 43#include "diagnostic-core.h"
40e23961 44#include "alias.h"
40e23961 45#include "fold-const.h"
5c1a2e63 46#include "fold-const-call.h"
cc8bea0a 47#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
48#include "stor-layout.h"
49#include "calls.h"
50#include "varasm.h"
51#include "tree-object-size.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
44a845ca 70#include "gimple-fold.h"
ee92e7ba 71#include "intl.h"
7365279f 72#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
73#include "gomp-constants.h"
74#include "omp-general.h"
81f5094d 75
fa19795e
RS
76struct target_builtins default_target_builtins;
77#if SWITCHABLE_TARGET
78struct target_builtins *this_target_builtins = &default_target_builtins;
79#endif
80
9df2c88c 81/* Define the names of the builtin function types and codes. */
5e351e96 82const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
c6a912da 85#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 86const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
87{
88#include "builtins.def"
89};
9df2c88c 90
cbf5d0e7 91/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 92 initialized to NULL_TREE. */
cbf5d0e7 93builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 94
4e7d7b3d
JJ
95/* Non-zero if __builtin_constant_p should be folded right away. */
96bool force_folding_builtin_constant_p;
97
095a2d76 98static rtx c_readstr (const char *, scalar_int_mode);
4682ae04 99static int target_char_cast (tree, char *);
435bb2a1 100static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
101static int apply_args_size (void);
102static int apply_result_size (void);
4682ae04 103static rtx result_vector (int, rtx);
4682ae04
AJ
104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6c7cf1f0 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 114static rtx expand_builtin_sincos (tree);
4359dc2a 115static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 118static rtx expand_builtin_next_arg (void);
4682ae04
AJ
119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
523a59ff 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
44e10129 123static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 126static rtx expand_builtin_memchr (tree, rtx);
44e10129 127static rtx expand_builtin_memcpy (tree, rtx);
671a00ee
ML
128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp, int endp);
e50d56a5 130static rtx expand_builtin_memmove (tree, rtx);
671a00ee 131static rtx expand_builtin_mempcpy (tree, rtx);
671a00ee 132static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
ee92e7ba 133static rtx expand_builtin_strcat (tree, rtx);
44e10129
MM
134static rtx expand_builtin_strcpy (tree, rtx);
135static rtx expand_builtin_strcpy_args (tree, tree, rtx);
ef4bddc2 136static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 137static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 138static rtx expand_builtin_strncat (tree, rtx);
44e10129 139static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 140static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
141static rtx expand_builtin_memset (tree, rtx, machine_mode);
142static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 143static rtx expand_builtin_bzero (tree);
ef4bddc2 144static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 145static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 146static rtx expand_builtin_alloca (tree);
ef4bddc2 147static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 148static rtx expand_builtin_frame_address (tree, tree);
db3927fb 149static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04
AJ
150static rtx expand_builtin_expect (tree, rtx);
151static tree fold_builtin_constant_p (tree);
152static tree fold_builtin_classify_type (tree);
ab996409 153static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 154static tree fold_builtin_inf (location_t, tree, int);
db3927fb 155static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 156static bool validate_arg (const_tree, enum tree_code code);
4682ae04 157static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 158static rtx expand_builtin_signbit (tree, rtx);
db3927fb 159static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
160static tree fold_builtin_isascii (location_t, tree);
161static tree fold_builtin_toascii (location_t, tree);
162static tree fold_builtin_isdigit (location_t, tree);
163static tree fold_builtin_fabs (location_t, tree, tree);
164static tree fold_builtin_abs (location_t, tree, tree);
165static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 166 enum tree_code);
2625bb5d
RB
167static tree fold_builtin_0 (location_t, tree);
168static tree fold_builtin_1 (location_t, tree, tree);
169static tree fold_builtin_2 (location_t, tree, tree, tree);
170static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
903c723b 171static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb
AH
172
173static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
db3927fb
AH
174static tree fold_builtin_strspn (location_t, tree, tree);
175static tree fold_builtin_strcspn (location_t, tree, tree);
6de9cd9a 176
10a0d495 177static rtx expand_builtin_object_size (tree);
ef4bddc2 178static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
179 enum built_in_function);
180static void maybe_emit_chk_warning (tree, enum built_in_function);
181static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 182static void maybe_emit_free_warning (tree);
5039610b 183static tree fold_builtin_object_size (tree, tree);
000ba23d 184
ad03a744 185unsigned HOST_WIDE_INT target_newline;
fef5a0d9 186unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
187static unsigned HOST_WIDE_INT target_c;
188static unsigned HOST_WIDE_INT target_s;
edd7ae68 189char target_percent_c[3];
fef5a0d9 190char target_percent_s[3];
ad03a744 191char target_percent_s_newline[4];
ea91f957 192static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 193static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 194static void expand_builtin_sync_synchronize (void);
10a0d495 195
d7f09764
DN
/* Return true if NAME starts with one of the prefixes reserved for
   built-in functions: "__builtin_", "__sync_" or "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  static const char *const prefixes[]
    = { "__builtin_", "__sync_", "__atomic_" };

  for (unsigned int i = 0; i < sizeof (prefixes) / sizeof (prefixes[0]); i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
6de9cd9a 209
d7f09764
DN
210
/* Return true if DECL is a function symbol representing a built-in.
   Both conditions must hold: DECL must be a FUNCTION_DECL and it must
   be flagged as a built-in (DECL_BUILT_IN).  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
218
bbf7ce11
RAE
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
232
644ffefd
MJ
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  /* Conservative defaults, used when nothing better can be derived.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      /* All-ones means no explicit misalignment mask was seen below.  */
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  /* TMR_INDEX2 contributes an offset with unknown alignment.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  /* Only the constant coefficient of the poly offset feeds the reported
     bit position; runtime parts were folded into ALIGN above.  */
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
390
b0f4a35f
RG
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.

   Thin wrapper around get_object_alignment_2 with ADDR_P false, i.e. the
   access is assumed to actually take place.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
402
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  /* A nonzero bitpos reduces the usable alignment to its lowest set bit.  */
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
420
644ffefd
MJ
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* Taking an address: defer to the object-alignment machinery with
       ADDR_P true since the access may never happen.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	/* A constant offset simply shifts the known bit position.  */
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* A variable offset can only preserve the alignment implied by
	     the number of its known trailing zero bits.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant pointer value: the alignment follows directly from
	 the numeric value.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Fallback: nothing is known beyond byte alignment.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
495
87c0fb4b
RG
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  /* A nonzero bitpos reduces the usable alignment to its lowest set bit.  */
  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
519
1eb4547b
MS
520/* Return the number of non-zero elements in the sequence
521 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
522 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
523
524static unsigned
525string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
526{
527 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
528
529 unsigned n;
530
531 if (eltsize == 1)
532 {
533 /* Optimize the common case of plain char. */
534 for (n = 0; n < maxelts; n++)
535 {
536 const char *elt = (const char*) ptr + n;
537 if (!*elt)
538 break;
539 }
540 }
541 else
542 {
543 for (n = 0; n < maxelts; n++)
544 {
545 const char *elt = (const char*) ptr + n * eltsize;
546 if (!memcmp (elt, "\0\0\0\0", eltsize))
547 break;
548 }
549 }
550 return n;
551}
552
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree
     (and evaluating the condition is safe or unnecessary).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) only the second operand determines the length.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array.  */
  HOST_WIDE_INT maxelts = TREE_STRING_LENGTH (src);
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      maxelts = tree_to_uhwi (size);

  maxelts = maxelts / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
683
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of HOST_WIDE_INT elements needed to hold the mode's bits,
     rounded up.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map source byte I to target bit offset J, honoring the target's
	 byte and word endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      /* Once the string's NUL terminator is reached CH stays zero, so
	 the remaining target bytes are zero-filled.  */
      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
721
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return 1 on failure (CST is not an INTEGER_CST, the target char is
   wider than a HOST_WIDE_INT, or the value does not survive truncation to
   a host char).  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  /* Truncate to the target's char width.  */
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  /* Truncate to the host's char width and verify nothing was lost.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
751
6de9cd9a
DN
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  /* SSA names and non-addressable parameters / automatic variables cannot
     change between evaluations, so no SAVE_EXPR wrapper is needed.  */
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
767
28f4ec01
BS
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
835
3bdf5ad1 836/* Alias set used for setjmp buffer. */
4862826d 837static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 838
250d07b6 839/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
840 return to RECEIVER_LABEL. This is also called directly by the SJLJ
841 exception handling code. */
28f4ec01 842
250d07b6 843void
4682ae04 844expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 845{
ef4bddc2 846 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 847 rtx stack_save;
3bdf5ad1 848 rtx mem;
28f4ec01 849
3bdf5ad1
RK
850 if (setjmp_alias_set == -1)
851 setjmp_alias_set = new_alias_set ();
852
5ae6cd0d 853 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 854
7d505b82 855 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 856
250d07b6
RH
857 /* We store the frame pointer and the address of receiver_label in
858 the buffer and use the rest of it for the stack save area, which
859 is machine-dependent. */
28f4ec01 860
3bdf5ad1 861 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 862 set_mem_alias_set (mem, setjmp_alias_set);
d6da68b9 863 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
3bdf5ad1 864
0a81f074
RS
865 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
866 GET_MODE_SIZE (Pmode))),
ba4828e0 867 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
868
869 emit_move_insn (validize_mem (mem),
250d07b6 870 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
871
872 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 873 plus_constant (Pmode, buf_addr,
28f4ec01 874 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 875 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 876 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
877
878 /* If there is further processing to do, do it. */
95a3fb9d
RS
879 if (targetm.have_builtin_setjmp_setup ())
880 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 881
ecaebb9e 882 /* We have a nonlocal label. */
e3b5732b 883 cfun->has_nonlocal_label = 1;
250d07b6 884}
28f4ec01 885
4f6c2131 886/* Construct the trailing part of a __builtin_setjmp call. This is
e90d1568
HPN
887 also called directly by the SJLJ exception handling code.
888 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
250d07b6
RH
889
890void
95a3fb9d 891expand_builtin_setjmp_receiver (rtx receiver_label)
250d07b6 892{
531ca746
RH
893 rtx chain;
894
e90d1568 895 /* Mark the FP as used when we get here, so we have to make sure it's
28f4ec01 896 marked as used by this function. */
c41c1387 897 emit_use (hard_frame_pointer_rtx);
28f4ec01
BS
898
899 /* Mark the static chain as clobbered here so life information
900 doesn't get messed up for it. */
4b522b8f 901 chain = rtx_for_static_chain (current_function_decl, true);
531ca746
RH
902 if (chain && REG_P (chain))
903 emit_clobber (chain);
28f4ec01
BS
904
905 /* Now put in the code to restore the frame pointer, and argument
caf93cb0 906 pointer, if needed. */
95a3fb9d 907 if (! targetm.have_nonlocal_goto ())
f1257268
RS
908 {
909 /* First adjust our frame pointer to its actual value. It was
910 previously set to the start of the virtual area corresponding to
911 the stacked variables when we branched here and now needs to be
912 adjusted to the actual hardware fp value.
913
914 Assignments to virtual registers are converted by
915 instantiate_virtual_regs into the corresponding assignment
916 to the underlying register (fp in this case) that makes
917 the original assignment true.
918 So the following insn will actually be decrementing fp by
2a31c321 919 TARGET_STARTING_FRAME_OFFSET. */
f1257268
RS
920 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
921
922 /* Restoring the frame pointer also modifies the hard frame pointer.
923 Mark it used (so that the previous assignment remains live once
924 the frame pointer is eliminated) and clobbered (to represent the
925 implicit update from the assignment). */
926 emit_use (hard_frame_pointer_rtx);
927 emit_clobber (hard_frame_pointer_rtx);
928 }
28f4ec01 929
38b0b093 930 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
28f4ec01 931 {
e90d1568
HPN
932 /* If the argument pointer can be eliminated in favor of the
933 frame pointer, we don't need to restore it. We assume here
934 that if such an elimination is present, it can always be used.
935 This is the case on all known machines; if we don't make this
936 assumption, we do unnecessary saving on many machines. */
28f4ec01 937 size_t i;
8b60264b 938 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
28f4ec01 939
b6a1cbae 940 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
28f4ec01
BS
941 if (elim_regs[i].from == ARG_POINTER_REGNUM
942 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
943 break;
944
b6a1cbae 945 if (i == ARRAY_SIZE (elim_regs))
28f4ec01
BS
946 {
947 /* Now restore our arg pointer from the address at which it
278ed218 948 was saved in our stack frame. */
2e3f842f 949 emit_move_insn (crtl->args.internal_arg_pointer,
bd60bab2 950 copy_to_reg (get_arg_pointer_save_area ()));
28f4ec01
BS
951 }
952 }
28f4ec01 953
95a3fb9d
RS
954 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
955 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
956 else if (targetm.have_nonlocal_goto_receiver ())
957 emit_insn (targetm.gen_nonlocal_goto_receiver ());
28f4ec01 958 else
95a3fb9d 959 { /* Nothing */ }
bcd7edfe 960
6fb5fa3c
DB
961 /* We must not allow the code we just generated to be reordered by
962 scheduling. Specifically, the update of the frame pointer must
f1257268 963 happen immediately, not later. */
6fb5fa3c 964 emit_insn (gen_blockage ());
250d07b6 965}
28f4ec01 966
28f4ec01
BS
967/* __builtin_longjmp is passed a pointer to an array of five words (not
968 all will be used on all machines). It operates similarly to the C
969 library function of the same name, but is more efficient. Much of
4f6c2131 970 the code below is copied from the handling of non-local gotos. */
28f4ec01 971
54e62799 972static void
4682ae04 973expand_builtin_longjmp (rtx buf_addr, rtx value)
28f4ec01 974{
58f4cf2a
DM
975 rtx fp, lab, stack;
976 rtx_insn *insn, *last;
ef4bddc2 977 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 978
b8698a0f 979 /* DRAP is needed for stack realign if longjmp is expanded to current
2e3f842f
L
980 function */
981 if (SUPPORTS_STACK_ALIGNMENT)
982 crtl->need_drap = true;
983
3bdf5ad1
RK
984 if (setjmp_alias_set == -1)
985 setjmp_alias_set = new_alias_set ();
986
5ae6cd0d 987 buf_addr = convert_memory_address (Pmode, buf_addr);
4b6c1672 988
28f4ec01
BS
989 buf_addr = force_reg (Pmode, buf_addr);
990
531ca746
RH
991 /* We require that the user must pass a second argument of 1, because
992 that is what builtin_setjmp will return. */
298e6adc 993 gcc_assert (value == const1_rtx);
28f4ec01 994
d337d653 995 last = get_last_insn ();
95a3fb9d
RS
996 if (targetm.have_builtin_longjmp ())
997 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
28f4ec01 998 else
28f4ec01
BS
999 {
1000 fp = gen_rtx_MEM (Pmode, buf_addr);
0a81f074 1001 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
28f4ec01
BS
1002 GET_MODE_SIZE (Pmode)));
1003
0a81f074 1004 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
28f4ec01 1005 2 * GET_MODE_SIZE (Pmode)));
ba4828e0
RK
1006 set_mem_alias_set (fp, setjmp_alias_set);
1007 set_mem_alias_set (lab, setjmp_alias_set);
1008 set_mem_alias_set (stack, setjmp_alias_set);
28f4ec01
BS
1009
1010 /* Pick up FP, label, and SP from the block and jump. This code is
1011 from expand_goto in stmt.c; see there for detailed comments. */
95a3fb9d 1012 if (targetm.have_nonlocal_goto ())
28f4ec01
BS
1013 /* We have to pass a value to the nonlocal_goto pattern that will
1014 get copied into the static_chain pointer, but it does not matter
1015 what that value is, because builtin_setjmp does not use it. */
95a3fb9d 1016 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
28f4ec01 1017 else
28f4ec01
BS
1018 {
1019 lab = copy_to_reg (lab);
1020
c41c1387
RS
1021 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1022 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
41439bf6 1023
28f4ec01 1024 emit_move_insn (hard_frame_pointer_rtx, fp);
9eac0f2a 1025 emit_stack_restore (SAVE_NONLOCAL, stack);
28f4ec01 1026
c41c1387
RS
1027 emit_use (hard_frame_pointer_rtx);
1028 emit_use (stack_pointer_rtx);
28f4ec01
BS
1029 emit_indirect_jump (lab);
1030 }
1031 }
4b01bd16
RH
1032
1033 /* Search backwards and mark the jump insn as a non-local goto.
1034 Note that this precludes the use of __builtin_longjmp to a
1035 __builtin_setjmp target in the same function. However, we've
1036 already cautioned the user that these functions are for
1037 internal exception handling use only. */
8206fc89
AM
1038 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1039 {
298e6adc 1040 gcc_assert (insn != last);
5906d013 1041
4b4bf941 1042 if (JUMP_P (insn))
8206fc89 1043 {
65c5f2a6 1044 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
8206fc89
AM
1045 break;
1046 }
4b4bf941 1047 else if (CALL_P (insn))
ca7fd9cd 1048 break;
8206fc89 1049 }
28f4ec01
BS
1050}
1051
862d0b35
DN
1052static inline bool
1053more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1054{
1055 return (iter->i < iter->n);
1056}
1057
1058/* This function validates the types of a function call argument list
1059 against a specified list of tree_codes. If the last specifier is a 0,
474da67e 1060 that represents an ellipsis, otherwise the last specifier must be a
862d0b35
DN
1061 VOID_TYPE. */
1062
1063static bool
1064validate_arglist (const_tree callexpr, ...)
1065{
1066 enum tree_code code;
1067 bool res = 0;
1068 va_list ap;
1069 const_call_expr_arg_iterator iter;
1070 const_tree arg;
1071
1072 va_start (ap, callexpr);
1073 init_const_call_expr_arg_iterator (callexpr, &iter);
1074
474da67e 1075 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
0dba7960
JJ
1076 tree fn = CALL_EXPR_FN (callexpr);
1077 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
474da67e
MS
1078
1079 for (unsigned argno = 1; ; ++argno)
862d0b35
DN
1080 {
1081 code = (enum tree_code) va_arg (ap, int);
474da67e 1082
862d0b35
DN
1083 switch (code)
1084 {
1085 case 0:
1086 /* This signifies an ellipses, any further arguments are all ok. */
1087 res = true;
1088 goto end;
1089 case VOID_TYPE:
1090 /* This signifies an endlink, if no arguments remain, return
1091 true, otherwise return false. */
1092 res = !more_const_call_expr_args_p (&iter);
1093 goto end;
474da67e
MS
1094 case POINTER_TYPE:
1095 /* The actual argument must be nonnull when either the whole
1096 called function has been declared nonnull, or when the formal
1097 argument corresponding to the actual argument has been. */
0dba7960
JJ
1098 if (argmap
1099 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1100 {
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code) || integer_zerop (arg))
1103 goto end;
1104 break;
1105 }
474da67e 1106 /* FALLTHRU */
862d0b35
DN
1107 default:
1108 /* If no parameters remain or the parameter's code does not
1109 match the specified code, return false. Otherwise continue
1110 checking any remaining arguments. */
1111 arg = next_const_call_expr_arg (&iter);
0dba7960 1112 if (!validate_arg (arg, code))
862d0b35
DN
1113 goto end;
1114 break;
1115 }
1116 }
862d0b35
DN
1117
1118 /* We need gotos here since we can only have one VA_CLOSE in a
1119 function. */
1120 end: ;
1121 va_end (ap);
1122
474da67e
MS
1123 BITMAP_FREE (argmap);
1124
862d0b35
DN
1125 return res;
1126}
1127
6de9cd9a
DN
1128/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1129 and the address of the save area. */
1130
1131static rtx
5039610b 1132expand_builtin_nonlocal_goto (tree exp)
6de9cd9a
DN
1133{
1134 tree t_label, t_save_area;
58f4cf2a
DM
1135 rtx r_label, r_save_area, r_fp, r_sp;
1136 rtx_insn *insn;
6de9cd9a 1137
5039610b 1138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
1139 return NULL_RTX;
1140
5039610b
SL
1141 t_label = CALL_EXPR_ARG (exp, 0);
1142 t_save_area = CALL_EXPR_ARG (exp, 1);
6de9cd9a 1143
84217346 1144 r_label = expand_normal (t_label);
5e89a381 1145 r_label = convert_memory_address (Pmode, r_label);
84217346 1146 r_save_area = expand_normal (t_save_area);
5e89a381 1147 r_save_area = convert_memory_address (Pmode, r_save_area);
bc6d3f91
EB
1148 /* Copy the address of the save location to a register just in case it was
1149 based on the frame pointer. */
cba2d79f 1150 r_save_area = copy_to_reg (r_save_area);
6de9cd9a
DN
1151 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1152 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
0a81f074
RS
1153 plus_constant (Pmode, r_save_area,
1154 GET_MODE_SIZE (Pmode)));
6de9cd9a 1155
e3b5732b 1156 crtl->has_nonlocal_goto = 1;
6de9cd9a 1157
6de9cd9a 1158 /* ??? We no longer need to pass the static chain value, afaik. */
95a3fb9d
RS
1159 if (targetm.have_nonlocal_goto ())
1160 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
6de9cd9a 1161 else
6de9cd9a
DN
1162 {
1163 r_label = copy_to_reg (r_label);
1164
c41c1387
RS
1165 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1166 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
caf93cb0 1167
bc6d3f91 1168 /* Restore frame pointer for containing function. */
6de9cd9a 1169 emit_move_insn (hard_frame_pointer_rtx, r_fp);
9eac0f2a 1170 emit_stack_restore (SAVE_NONLOCAL, r_sp);
caf93cb0 1171
6de9cd9a
DN
1172 /* USE of hard_frame_pointer_rtx added for consistency;
1173 not clear if really needed. */
c41c1387
RS
1174 emit_use (hard_frame_pointer_rtx);
1175 emit_use (stack_pointer_rtx);
eae645b6
RS
1176
1177 /* If the architecture is using a GP register, we must
1178 conservatively assume that the target function makes use of it.
1179 The prologue of functions with nonlocal gotos must therefore
1180 initialize the GP register to the appropriate value, and we
1181 must then make sure that this value is live at the point
1182 of the jump. (Note that this doesn't necessarily apply
1183 to targets with a nonlocal_goto pattern; they are free
1184 to implement it in their own way. Note also that this is
1185 a no-op if the GP register is a global invariant.) */
959c1e20
AH
1186 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1187 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
c41c1387 1188 emit_use (pic_offset_table_rtx);
eae645b6 1189
6de9cd9a
DN
1190 emit_indirect_jump (r_label);
1191 }
caf93cb0 1192
6de9cd9a
DN
1193 /* Search backwards to the jump insn and mark it as a
1194 non-local goto. */
1195 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1196 {
4b4bf941 1197 if (JUMP_P (insn))
6de9cd9a 1198 {
65c5f2a6 1199 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
6de9cd9a
DN
1200 break;
1201 }
4b4bf941 1202 else if (CALL_P (insn))
6de9cd9a
DN
1203 break;
1204 }
1205
1206 return const0_rtx;
1207}
1208
2b92e7f5
RK
1209/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1210 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
1211 It updates the stack pointer in that block to the current value. This is
1212 also called directly by the SJLJ exception handling code. */
2b92e7f5 1213
d33606c3 1214void
2b92e7f5
RK
1215expand_builtin_update_setjmp_buf (rtx buf_addr)
1216{
ef4bddc2 1217 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 1218 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 1219 rtx stack_save
2b92e7f5
RK
1220 = gen_rtx_MEM (sa_mode,
1221 memory_address
1222 (sa_mode,
0a81f074
RS
1223 plus_constant (Pmode, buf_addr,
1224 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 1225
9eac0f2a 1226 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
1227}
1228
a9ccbb60
JJ
1229/* Expand a call to __builtin_prefetch. For a target that does not support
1230 data prefetch, evaluate the memory address argument in case it has side
1231 effects. */
1232
1233static void
5039610b 1234expand_builtin_prefetch (tree exp)
a9ccbb60
JJ
1235{
1236 tree arg0, arg1, arg2;
5039610b 1237 int nargs;
a9ccbb60
JJ
1238 rtx op0, op1, op2;
1239
5039610b 1240 if (!validate_arglist (exp, POINTER_TYPE, 0))
e83d297b
JJ
1241 return;
1242
5039610b
SL
1243 arg0 = CALL_EXPR_ARG (exp, 0);
1244
e83d297b
JJ
1245 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1246 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1247 locality). */
5039610b
SL
1248 nargs = call_expr_nargs (exp);
1249 if (nargs > 1)
1250 arg1 = CALL_EXPR_ARG (exp, 1);
e83d297b 1251 else
5039610b
SL
1252 arg1 = integer_zero_node;
1253 if (nargs > 2)
1254 arg2 = CALL_EXPR_ARG (exp, 2);
1255 else
9a9d280e 1256 arg2 = integer_three_node;
a9ccbb60
JJ
1257
1258 /* Argument 0 is an address. */
1259 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1260
1261 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1262 if (TREE_CODE (arg1) != INTEGER_CST)
1263 {
40b97a2e 1264 error ("second argument to %<__builtin_prefetch%> must be a constant");
ca7fd9cd 1265 arg1 = integer_zero_node;
a9ccbb60 1266 }
84217346 1267 op1 = expand_normal (arg1);
a9ccbb60
JJ
1268 /* Argument 1 must be either zero or one. */
1269 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1270 {
d4ee4d25 1271 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
40b97a2e 1272 " using zero");
a9ccbb60
JJ
1273 op1 = const0_rtx;
1274 }
1275
1276 /* Argument 2 (locality) must be a compile-time constant int. */
1277 if (TREE_CODE (arg2) != INTEGER_CST)
1278 {
40b97a2e 1279 error ("third argument to %<__builtin_prefetch%> must be a constant");
a9ccbb60
JJ
1280 arg2 = integer_zero_node;
1281 }
84217346 1282 op2 = expand_normal (arg2);
a9ccbb60
JJ
1283 /* Argument 2 must be 0, 1, 2, or 3. */
1284 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1285 {
d4ee4d25 1286 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
a9ccbb60
JJ
1287 op2 = const0_rtx;
1288 }
1289
134b044d 1290 if (targetm.have_prefetch ())
a9ccbb60 1291 {
a5c7d693
RS
1292 struct expand_operand ops[3];
1293
1294 create_address_operand (&ops[0], op0);
1295 create_integer_operand (&ops[1], INTVAL (op1));
1296 create_integer_operand (&ops[2], INTVAL (op2));
134b044d 1297 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
a5c7d693 1298 return;
a9ccbb60 1299 }
ad76cef8 1300
5ab2f7b7
KH
1301 /* Don't do anything with direct references to volatile memory, but
1302 generate code to handle other side effects. */
3c0cb5de 1303 if (!MEM_P (op0) && side_effects_p (op0))
5ab2f7b7 1304 emit_insn (op0);
a9ccbb60
JJ
1305}
1306
3bdf5ad1 1307/* Get a MEM rtx for expression EXP which is the address of an operand
435bb2a1
JJ
1308 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1309 the maximum length of the block of memory that might be accessed or
1310 NULL if unknown. */
3bdf5ad1 1311
28f4ec01 1312static rtx
435bb2a1 1313get_memory_rtx (tree exp, tree len)
28f4ec01 1314{
805903b5
JJ
1315 tree orig_exp = exp;
1316 rtx addr, mem;
805903b5
JJ
1317
1318 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1319 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1320 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1321 exp = TREE_OPERAND (exp, 0);
1322
1323 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1324 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
8ac61af7 1325
3bdf5ad1 1326 /* Get an expression we can use to find the attributes to assign to MEM.
625ed172 1327 First remove any nops. */
1043771b 1328 while (CONVERT_EXPR_P (exp)
3bdf5ad1
RK
1329 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1330 exp = TREE_OPERAND (exp, 0);
1331
625ed172
MM
1332 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1333 (as builtin stringops may alias with anything). */
1334 exp = fold_build2 (MEM_REF,
1335 build_array_type (char_type_node,
1336 build_range_type (sizetype,
1337 size_one_node, len)),
1338 exp, build_int_cst (ptr_type_node, 0));
1339
1340 /* If the MEM_REF has no acceptable address, try to get the base object
1341 from the original address we got, and build an all-aliasing
1342 unknown-sized access to that one. */
1343 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1344 set_mem_attributes (mem, exp, 0);
1345 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1346 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1347 0))))
343fb412 1348 {
625ed172
MM
1349 exp = build_fold_addr_expr (exp);
1350 exp = fold_build2 (MEM_REF,
1351 build_array_type (char_type_node,
1352 build_range_type (sizetype,
1353 size_zero_node,
1354 NULL)),
1355 exp, build_int_cst (ptr_type_node, 0));
931e6c29 1356 set_mem_attributes (mem, exp, 0);
343fb412 1357 }
625ed172 1358 set_mem_alias_set (mem, 0);
28f4ec01
BS
1359 return mem;
1360}
1361\f
1362/* Built-in functions to perform an untyped call and return. */
1363
fa19795e
RS
1364#define apply_args_mode \
1365 (this_target_builtins->x_apply_args_mode)
1366#define apply_result_mode \
1367 (this_target_builtins->x_apply_result_mode)
28f4ec01 1368
28f4ec01
BS
1369/* Return the size required for the block returned by __builtin_apply_args,
1370 and initialize apply_args_mode. */
1371
1372static int
4682ae04 1373apply_args_size (void)
28f4ec01
BS
1374{
1375 static int size = -1;
cbf5468f
AH
1376 int align;
1377 unsigned int regno;
28f4ec01
BS
1378
1379 /* The values computed by this function never change. */
1380 if (size < 0)
1381 {
1382 /* The first value is the incoming arg-pointer. */
1383 size = GET_MODE_SIZE (Pmode);
1384
1385 /* The second value is the structure value address unless this is
1386 passed as an "invisible" first argument. */
92f6864c 1387 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1388 size += GET_MODE_SIZE (Pmode);
1389
1390 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1391 if (FUNCTION_ARG_REGNO_P (regno))
1392 {
b660eccf 1393 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
33521f7d 1394
298e6adc 1395 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1396
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
28f4ec01
BS
1400 size += GET_MODE_SIZE (mode);
1401 apply_args_mode[regno] = mode;
1402 }
1403 else
1404 {
b660eccf 1405 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1406 }
1407 }
1408 return size;
1409}
1410
1411/* Return the size required for the block returned by __builtin_apply,
1412 and initialize apply_result_mode. */
1413
1414static int
4682ae04 1415apply_result_size (void)
28f4ec01
BS
1416{
1417 static int size = -1;
1418 int align, regno;
28f4ec01
BS
1419
1420 /* The values computed by this function never change. */
1421 if (size < 0)
1422 {
1423 size = 0;
1424
1425 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
82f81f18 1426 if (targetm.calls.function_value_regno_p (regno))
28f4ec01 1427 {
b660eccf 1428 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
33521f7d 1429
298e6adc 1430 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1431
1432 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1433 if (size % align != 0)
1434 size = CEIL (size, align) * align;
1435 size += GET_MODE_SIZE (mode);
1436 apply_result_mode[regno] = mode;
1437 }
1438 else
b660eccf 1439 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
1440
1441 /* Allow targets that use untyped_call and untyped_return to override
1442 the size so that machine-specific information can be stored here. */
1443#ifdef APPLY_RESULT_SIZE
1444 size = APPLY_RESULT_SIZE;
1445#endif
1446 }
1447 return size;
1448}
1449
28f4ec01
BS
1450/* Create a vector describing the result block RESULT. If SAVEP is true,
1451 the result block is used to save the values; otherwise it is used to
1452 restore the values. */
1453
1454static rtx
4682ae04 1455result_vector (int savep, rtx result)
28f4ec01
BS
1456{
1457 int regno, size, align, nelts;
b660eccf 1458 fixed_size_mode mode;
28f4ec01 1459 rtx reg, mem;
f883e0a7 1460 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
8d51ecf8 1461
28f4ec01
BS
1462 size = nelts = 0;
1463 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1464 if ((mode = apply_result_mode[regno]) != VOIDmode)
1465 {
1466 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1467 if (size % align != 0)
1468 size = CEIL (size, align) * align;
1469 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
f4ef873c 1470 mem = adjust_address (result, mode, size);
28f4ec01 1471 savevec[nelts++] = (savep
f7df4a84
RS
1472 ? gen_rtx_SET (mem, reg)
1473 : gen_rtx_SET (reg, mem));
28f4ec01
BS
1474 size += GET_MODE_SIZE (mode);
1475 }
1476 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1477}
28f4ec01
BS
1478
1479/* Save the state required to perform an untyped call with the same
1480 arguments as were passed to the current function. */
1481
1482static rtx
4682ae04 1483expand_builtin_apply_args_1 (void)
28f4ec01 1484{
88e541e1 1485 rtx registers, tem;
28f4ec01 1486 int size, align, regno;
b660eccf 1487 fixed_size_mode mode;
92f6864c 1488 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
28f4ec01
BS
1489
1490 /* Create a block where the arg-pointer, structure value address,
1491 and argument registers can be saved. */
1492 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1493
1494 /* Walk past the arg-pointer and structure value address. */
1495 size = GET_MODE_SIZE (Pmode);
92f6864c 1496 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
1497 size += GET_MODE_SIZE (Pmode);
1498
1499 /* Save each register used in calling a function to the block. */
1500 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1501 if ((mode = apply_args_mode[regno]) != VOIDmode)
1502 {
28f4ec01
BS
1503 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1504 if (size % align != 0)
1505 size = CEIL (size, align) * align;
1506
1507 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1508
f4ef873c 1509 emit_move_insn (adjust_address (registers, mode, size), tem);
28f4ec01
BS
1510 size += GET_MODE_SIZE (mode);
1511 }
1512
1513 /* Save the arg pointer to the block. */
2e3f842f 1514 tem = copy_to_reg (crtl->args.internal_arg_pointer);
88e541e1 1515 /* We need the pointer as the caller actually passed them to us, not
ac3f5df7
HPN
1516 as we might have pretended they were passed. Make sure it's a valid
1517 operand, as emit_move_insn isn't expected to handle a PLUS. */
581edfa3
TS
1518 if (STACK_GROWS_DOWNWARD)
1519 tem
1520 = force_operand (plus_constant (Pmode, tem,
1521 crtl->args.pretend_args_size),
1522 NULL_RTX);
88e541e1 1523 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
33521f7d 1524
28f4ec01
BS
1525 size = GET_MODE_SIZE (Pmode);
1526
1527 /* Save the structure value address unless this is passed as an
1528 "invisible" first argument. */
61f71b34 1529 if (struct_incoming_value)
28f4ec01 1530 {
f4ef873c 1531 emit_move_insn (adjust_address (registers, Pmode, size),
61f71b34 1532 copy_to_reg (struct_incoming_value));
28f4ec01
BS
1533 size += GET_MODE_SIZE (Pmode);
1534 }
1535
1536 /* Return the address of the block. */
1537 return copy_addr_to_reg (XEXP (registers, 0));
1538}
1539
1540/* __builtin_apply_args returns block of memory allocated on
1541 the stack into which is stored the arg pointer, structure
1542 value address, static chain, and all the registers that might
1543 possibly be used in performing a function call. The code is
1544 moved to the start of the function so the incoming values are
1545 saved. */
5197bd50 1546
28f4ec01 1547static rtx
4682ae04 1548expand_builtin_apply_args (void)
28f4ec01
BS
1549{
1550 /* Don't do __builtin_apply_args more than once in a function.
1551 Save the result of the first call and reuse it. */
1552 if (apply_args_value != 0)
1553 return apply_args_value;
1554 {
1555 /* When this function is called, it means that registers must be
1556 saved on entry to this function. So we migrate the
1557 call to the first insn of this function. */
1558 rtx temp;
28f4ec01
BS
1559
1560 start_sequence ();
1561 temp = expand_builtin_apply_args_1 ();
e67d1102 1562 rtx_insn *seq = get_insns ();
28f4ec01
BS
1563 end_sequence ();
1564
1565 apply_args_value = temp;
1566
2f937369
DM
1567 /* Put the insns after the NOTE that starts the function.
1568 If this is inside a start_sequence, make the outer-level insn
28f4ec01 1569 chain current, so the code is placed at the start of the
1f21b6f4
JJ
1570 function. If internal_arg_pointer is a non-virtual pseudo,
1571 it needs to be placed after the function that initializes
1572 that pseudo. */
28f4ec01 1573 push_topmost_sequence ();
1f21b6f4
JJ
1574 if (REG_P (crtl->args.internal_arg_pointer)
1575 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1576 emit_insn_before (seq, parm_birth_insn);
1577 else
1578 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
28f4ec01
BS
1579 pop_topmost_sequence ();
1580 return temp;
1581 }
1582}
1583
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address to call, ARGUMENTS points at the block built
   by __builtin_apply_args, and ARGSIZE is the size of the outgoing
   argument block to copy.  Returns the address of a stack block holding
   the saved return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1745
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers produced by expand_builtin_apply; reload each
   register from it and return directly to the caller.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate the USE insns in CALL_FUSAGE so they can all be
	   emitted together below.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1794
ad82abb8 1795/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1796
ad82abb8 1797static enum type_class
4682ae04 1798type_to_class (tree type)
ad82abb8
ZW
1799{
1800 switch (TREE_CODE (type))
1801 {
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
ad82abb8
ZW
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1820 }
1821}
8d51ecf8 1822
5039610b 1823/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1824
28f4ec01 1825static rtx
5039610b 1826expand_builtin_classify_type (tree exp)
28f4ec01 1827{
5039610b
SL
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1830 return GEN_INT (no_type_class);
1831}
1832
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  The macros expand into switch cases and rely on
   the local FCODE* variables declared in mathfn_built_in_2.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 1860
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* Each CASE_MATHFN* macro sets the FCODE* variables above for one
     math function family; the type dispatch below then picks the one
     matching TYPE.  */
  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Dispatch on the main variant so qualified/typedef'd types match.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
1997
1998/* Return mathematic function equivalent to FN but operating directly on TYPE,
1999 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2000 otherwise use the explicit declaration. If we can't do the conversion,
2001 return null. */
2002
2003static tree
b03ff92e 2004mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2005{
2006 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2007 if (fcode2 == END_BUILTINS)
5039610b 2008 return NULL_TREE;
e79983f4
MM
2009
2010 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2011 return NULL_TREE;
2012
2013 return builtin_decl_explicit (fcode2);
272f51a3
JH
2014}
2015
b03ff92e 2016/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2017
2018tree
b03ff92e 2019mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2020{
2021 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2022}
2023
b03ff92e
RS
2024/* Like mathfn_built_in_1, but take a built_in_function and
2025 always use the implicit array. */
2026
2027tree
2028mathfn_built_in (tree type, enum built_in_function fn)
2029{
2030 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2031}
2032
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* Generate one case per directly-mapped internal function from
       internal-fn.def; the macros are consumed (and #undef'd) by that
       header.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is only ldexp when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2070
2071/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2072 on the current target by a call to an internal function, return the
2073 code of that internal function, otherwise return IFN_LAST. The caller
2074 is responsible for ensuring that any side-effects of the built-in
2075 call are dealt with correctly. E.g. if CALL sets errno, the caller
2076 must decide that the errno result isn't needed or make it available
2077 in some other way. */
2078
2079internal_fn
2080replacement_internal_fn (gcall *call)
2081{
2082 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2083 {
2084 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2085 if (ifn != IFN_LAST)
2086 {
2087 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2088 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2089 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2090 return ifn;
2091 }
2092 }
2093 return IFN_LAST;
2094}
2095
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2170
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos_optab computes both values; pick the output slot
	     (sin is the second output, cos the first).  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2271
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2315
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the original argument so it can be restored if the
	 expansion below is abandoned.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: discard any insns emitted and fall back to a
	 library call with the untouched argument.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2366
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the sin and cos output pointers so the stores
     below get correct alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2420
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Returns the RTX holding the
   complex result cos(x) + i*sin(x).

   Three strategies are tried in order:
   1. the sincos optab, computing sine and cosine into registers;
   2. a call to the C library sincos function through out-parameters;
   3. a call to cexp with the argument made purely imaginary.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* sincos writes its results through pointer arguments, so create
	 two stack temporaries and pass their addresses.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + i*x), so build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2529
44e10129
MM
2530/* Conveniently construct a function call expression. FNDECL names the
2531 function to be called, N is the number of arguments, and the "..."
2532 parameters are the argument expressions. Unlike build_call_exr
2533 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2534
2535static tree
2536build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2537{
2538 va_list ap;
2539 tree fntype = TREE_TYPE (fndecl);
2540 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2541
2542 va_start (ap, n);
2543 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2544 va_end (ap);
2545 SET_EXPR_LOCATION (fn, loc);
2546 return fn;
2547}
44e10129 2548
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the conversion optab and the math fallback matching the
     builtin being expanded.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2686
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Pick the optab; the i* variants additionally record an l* fallback
     used when the optab cannot be expanded.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2788
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both operands and coerce them to the modes the libcall
     expects (the float mode for the base, int mode for the power).  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
2830
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-expansion insns in ahead of the strlen insn
     recorded above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
2936
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.
   TARGET_MODE is the mode the result is expected in.  Also diagnoses
   bounds that exceed the maximum object size (-Wstringop-overflow).  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  tree maxobjsize = max_object_size ();
  tree func = get_callee_fndecl (exp);

  /* Compile-time constant length of SRC, if known.  */
  tree len = c_strlen (src, 0);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      if (!TREE_NO_WARNING (exp)
	  && tree_int_cst_lt (maxobjsize, bound)
	  && warning_at (loc, OPT_Wstringop_overflow_,
			 "%K%qD specified bound %E "
			 "exceeds maximum object size %E",
			 exp, func, bound, maxobjsize))
	TREE_NO_WARNING (exp) = true;

      if (!len || TREE_CODE (len) != INTEGER_CST)
	return NULL_RTX;

      /* strnlen (s, n) == min (strlen (s), n) for constant operands.  */
      len = fold_convert_loc (loc, size_type_node, len);
      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  /* BOUND is not constant; try its value range instead.  */
  wide_int min, max;
  enum value_range_type rng = get_range_info (bound, &min, &max);
  if (rng != VR_RANGE)
    return NULL_RTX;

  if (!TREE_NO_WARNING (exp)
      && wi::ltu_p (wi::to_wide (maxobjsize), min)
      && warning_at (loc, OPT_Wstringop_overflow_,
		     "%K%qD specified bound [%wu, %wu] "
		     "exceeds maximum object size %E",
		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
    TREE_NO_WARNING (exp) = true;

  if (!len || TREE_CODE (len) != INTEGER_CST)
    return NULL_RTX;

  /* If the bound is provably greater than the string length the result
     is the string length itself.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3004
57814e5e
JJ
3005/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3006 bytes from constant string DATA + OFFSET and return it as target
3007 constant. */
3008
3009static rtx
4682ae04 3010builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3011 scalar_int_mode mode)
57814e5e
JJ
3012{
3013 const char *str = (const char *) data;
3014
298e6adc
NS
3015 gcc_assert (offset >= 0
3016 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3017 <= strlen (str) + 1));
57814e5e
JJ
3018
3019 return c_readstr (str + offset, mode);
3020}
3021
/* LEN specifies the length of the block of a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  LEN_RTX is the expanded RTX for LEN;
   a CONST_INT makes all three outputs exact.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the bounds using value-range info on the SSA name.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3088
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
   DSTSIZE).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  get_range_strlen (srcstr, range);
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  if (dstwrite)
    get_size_range (dstwrite, range);

  /* This can happen at -O0.  */
  if (range[0] && TREE_CODE (range[0]) != INTEGER_CST)
    return false;

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      bool warned;
      if (range[0] == range[1])
	warned = warning_at (loc, opt,
			     "%K%qD specified size %E "
			     "exceeds maximum object size %E",
			     exp, func, range[0], maxobjsize);
      else
	warned = warning_at (loc, opt,
			     "%K%qD specified size between %E and %E "
			     "exceeds maximum object size %E",
			     exp, func,
			     range[0], range[1], maxobjsize);
      /* Suppress repeated diagnostics for the same call.  */
      if (warned)
	TREE_NO_WARNING (exp) = true;

      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_n (loc, opt, tree_to_uhwi (range[0]),
		       "%K%qD writing %E byte into a region "
		       "of size %E overflows the destination",
		       "%K%qD writing %E bytes into a region "
		       "of size %E overflows the destination",
		       exp, func, range[0], dstsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, func, range[0], range[1],
			dstsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      get_size_range (maxread, range);

      /* Use the lower end for MAXREAD from now on.  */
      if (range[0])
	maxread = range[0];

      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], dstsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], range[1], dstsize);
	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_n (loc, opt, tree_to_uhwi (range[0]),
		   "%K%qD reading %E byte from a region of size %E",
		   "%K%qD reading %E bytes from a region of size %E",
		   exp, func, range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, func, range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, func, range[0], range[1], slen);
      return false;
    }

  return true;
}
3382
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  /* First try the object-size machinery; it succeeds for constant
     offsets into objects of known size.  */
  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      /* Strip the assignment and continue with its first operand
	 (the base pointer for POINTER_PLUS_EXPR).  */
      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      /* For a variable offset, consult range information and
	         use its lower bound the same way as a constant above.  */
	      wide_int min, max;
	      enum value_range_type rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  /* Arrays at the end of a struct may really extend further, so only
     trust the type size for interior arrays.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3496
3497/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
3498 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3499 call expression, DEST is the destination argument, SRC is the source
3500 argument or null, and LEN is the number of bytes. Use Object Size type-0
3501 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
3502 (no overflow or invalid sizes), false otherwise. */
3503
3504static bool
cc8bea0a 3505check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 3506{
ee92e7ba 3507 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
3508 try to determine the size of the largest source and destination
3509 object using type-0 Object Size regardless of the object size
3510 type specified by the option. */
3511 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3512 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 3513
cc8bea0a
MS
3514 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3515 srcsize, dstsize);
d9c5a8b9
MS
3516}
3517
3518/* Validate memchr arguments without performing any expansion.
3519 Return NULL_RTX. */
3520
3521static rtx
3522expand_builtin_memchr (tree exp, rtx)
3523{
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3527
3528 tree arg1 = CALL_EXPR_ARG (exp, 0);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3530
3531 /* Diagnose calls where the specified length exceeds the size
3532 of the object. */
3533 if (warn_stringop_overflow)
3534 {
3535 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
3536 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3537 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
3538 }
3539
3540 return NULL_RTX;
ee92e7ba
MS
3541}
3542
5039610b
SL
3543/* Expand a call EXP to the memcpy builtin.
3544 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 3545 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 3546 mode MODE if that's convenient). */
5039610b 3547
28f4ec01 3548static rtx
44e10129 3549expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 3550{
5039610b
SL
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
ee92e7ba
MS
3554
3555 tree dest = CALL_EXPR_ARG (exp, 0);
3556 tree src = CALL_EXPR_ARG (exp, 1);
3557 tree len = CALL_EXPR_ARG (exp, 2);
3558
cc8bea0a 3559 check_memop_access (exp, dest, src, len);
ee92e7ba 3560
671a00ee
ML
3561 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3562 /*endp=*/ 0);
edcf72f3 3563}
57814e5e 3564
e50d56a5
MS
3565/* Check a call EXP to the memmove built-in for validity.
3566 Return NULL_RTX on both success and failure. */
3567
3568static rtx
3569expand_builtin_memmove (tree exp, rtx)
3570{
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574
3575 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 3576 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
3577 tree len = CALL_EXPR_ARG (exp, 2);
3578
cc8bea0a 3579 check_memop_access (exp, dest, src, len);
e50d56a5
MS
3580
3581 return NULL_RTX;
3582}
3583
5039610b
SL
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  mempcpy returns the destination
   pointer advanced past the copied bytes, i.e. the call is expanded
   with ENDP == 1 (see expand_builtin_memory_copy_args).  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Policy does not generally allow using compute_objsize (which
     is used internally by check_memop_access) to change code generation
     or drive optimization decisions.

     In this instance it is safe because the code we generate has
     the same semantics regardless of the return value of
     check_memop_access.  Exactly the same amount of data is copied
     and the return value is exactly the same in both cases.

     Furthermore, check_memop_access always uses mode 0 for the call to
     compute_objsize, so the imprecise nature of compute_objsize is
     avoided.  */

  /* Avoid expanding mempcpy into memcpy when the call is determined
     to overflow the buffer.  This also prevents the same overflow
     from being diagnosed again when expanding memcpy.  */
  if (!check_memop_access (exp, dest, src, len))
    return NULL_RTX;

  return expand_builtin_mempcpy_args (dest, src, len,
				      target, exp, /*endp=*/ 1);
}
3625
671a00ee
ML
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, int endp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may provide a better alignment/size estimate
     for the block operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, endp);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  Pick the block-move method:
     a tail call is only possible when the result is unused or is the
     plain destination pointer; a mempcpy-style result whose value is
     used must not fall back to a plain libcall.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (endp == 1 && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);
  /* pc_rtx signals that the move could not be emitted inline; punt to
     a normal call.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* Adjust the return value for the mempcpy/stpcpy conventions.  */
  if (endp && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (endp == 2)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3720
/* Thin wrapper around expand_builtin_memory_copy_args for callers that
   have already broken the arguments DEST, SRC and LEN out of a
   mempcpy-style call.  ORIG_EXP is the original call expression and
   ENDP selects the return convention (see
   expand_builtin_memory_copy_args).  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  endp);
}
3728
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Punt unless the target provides a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The result is the destination pointer, so capture it in a
	 register before the pattern potentially clobbers operand 0.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3774}
3775
ee92e7ba
MS
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
		destsize);

  return NULL_RTX;
}
3802
b8698a0f
L
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
3836
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* ENDP == 0 makes the expansion return the destination pointer,
     matching the strcpy return value.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3848
5039610b
SL
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies that overflow the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen (SRC) + 1 bytes and return DST + strlen (SRC),
	 i.e. expand as mempcpy with ENDP == 2.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp, /*endp=*/2);

      if (ret)
	return ret;

      /* Fall back to strcpy expansion plus a constant adjustment of
	 the returned pointer when the length is a known constant.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* stpcpy returns DST + strlen (SRC).  */
		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3931
3ce4cdb2
MS
3932/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3933 arguments while being careful to avoid duplicate warnings (which could
3934 be issued if the expander were to expand the call, resulting in it
3935 being emitted in expand_call(). */
3936
3937static rtx
3938expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3939{
3940 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3941 {
3942 /* The call has been successfully expanded. Check for nonstring
3943 arguments and issue warnings as appropriate. */
3944 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3945 return ret;
3946 }
3947
3948 return NULL_RTX;
3949}
3950
e50d56a5
MS
3951/* Check a call EXP to the stpncpy built-in for validity.
3952 Return NULL_RTX on both success and failure. */
3953
3954static rtx
3955expand_builtin_stpncpy (tree exp, rtx)
3956{
3957 if (!validate_arglist (exp,
3958 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3959 || !warn_stringop_overflow)
3960 return NULL_RTX;
3961
c6c02519 3962 /* The source and destination of the call. */
e50d56a5
MS
3963 tree dest = CALL_EXPR_ARG (exp, 0);
3964 tree src = CALL_EXPR_ARG (exp, 1);
3965
c6c02519 3966 /* The exact number of bytes to write (not the maximum). */
e50d56a5 3967 tree len = CALL_EXPR_ARG (exp, 2);
e50d56a5 3968
c6c02519 3969 /* The size of the destination object. */
d9c5a8b9 3970 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e50d56a5 3971
cc8bea0a 3972 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
e50d56a5
MS
3973
3974 return NULL_RTX;
3975}
3976
57814e5e
JJ
3977/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3978 bytes from constant string DATA + OFFSET and return it as target
3979 constant. */
3980
14a43348 3981rtx
4682ae04 3982builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3983 scalar_int_mode mode)
57814e5e
JJ
3984{
3985 const char *str = (const char *) data;
3986
3987 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3988 return const0_rtx;
3989
3990 return c_readstr (str + offset, mode);
3991}
3992
ee92e7ba
MS
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or the bound is smaller than
     the source, check against the bound instead.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4053
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* When the source length is unknown, or the bound is smaller than
     the source, check against the bound instead.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4119
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
	{
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_access will also
	     check SLEN if LEN's value isn't known.  */
	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
			destsize);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the end of
	     the source string, providing the required padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
4182
ab937357
JJ
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  DATA points at the single fill character; OFFSET is
   irrelevant because every piece of a memset has identical contents.  */

rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  /* Replicate the fill byte across a stack buffer of the piece size,
     then convert the bytes into an RTL integer constant of MODE.  */
  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
4198
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  /* A one-byte piece is just the fill-value register itself.  */
  if (size == 1)
    return (rtx) data;

  /* Build the 0x01010101... replication coefficient for MODE.  */
  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  /* Zero-extend the byte value to MODE and multiply by the
     coefficient to smear it across every byte position.  */
  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
4224
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  /* Verify the prototype: memset (void *, int, size_t).  */
  if (!validate_arglist (exp,
 			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose writes past the end of the destination object.  */
  check_memop_access (exp, dest, NULL_TREE, len);

  /* The real work is shared with the bzero expander.  */
  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
28f4ec01 4245
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used for its
   location, tail-call flag and callee when falling back to a libcall.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback (if available) to guess alignment and size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      /* Non-constant fill value: the bytes must be computed at runtime.  */
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value: narrow it to a target char, or punt.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Nonzero constant byte: store by pieces or use the setmem insn.  */
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: let clear_storage_hints pick the best strategy.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit a call to the original builtin
     (memset or bzero) using the stabilized arguments so they are not
     evaluated twice.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4386
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  /* Verify the prototype: bzero (void *, size_t).  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* Diagnose writes past the end of the destination object.  */
  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).   This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
4413
/* Try to expand cmpstr operation ICODE with the given operands.
   ARG1_RTX and ARG2_RTX are the two memory blocks and ALIGN their
   common guaranteed alignment in bytes.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  /* Hard registers (or no register at all) make poor outputs; let the
     expander allocate a pseudo instead.  */
  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
4435
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  /* Verify the prototype: memcmp (const void *, const void *, size_t).  */
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len, /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  /* BCMP is excluded: only ordering-style memcmp benefits here, and a
     diagnosed overflow disables the by-pieces inline expansion.  */
  if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For equality comparisons either operand may supply the constant
     string; swap so the constant side is always ARG2.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only stored the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4541
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  /* Verify the prototype: strcmp (const char *, const char *).  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* Otherwise fall back on the target's cmpstr/cmpstrn insns, if any.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Account for the terminating nul byte.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
28f4ec01 4658
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  /* Verify the prototype: strncmp (const char *, const char *, size_t).  */
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* Account for the terminating nul byte.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4768
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4806
/* Expand a call to __builtin_next_arg.  Returns the address just past
   the last named argument, i.e. internal_arg_pointer + arg_offset.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4819
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is set, the result must
   be usable as an lvalue (an address is taken of non-array va_lists).  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-dereference through the saved pointer so the value is
	 evaluated exactly once however often the tree is expanded.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4869
/* The "standard" definition of va_list is void*.  Targets with a richer
   va_list override the corresponding hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4877
/* The "standard" abi va_list is va_list_type_node.  FNDECL is ignored
   because the default ABI does not vary per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4885
/* The "standard" type of va_list is va_list_type_node.  Return that
   node if TYPE is compatible with it, or NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4914
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG the RTL address
   of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
4924
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx
   (the call produces no value).  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bogus second argument; bail out
     quietly if it already reported a problem.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target hook; fall back to the generic expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4953
/* Expand EXP, a call to __builtin_va_end.  va_end is a no-op here; we
   only evaluate the operand for its side effects.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4968
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST must be an lvalue; SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5020
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call whose single operand is the frame count.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* The return address may live in memory; copy it into a register
	 unless it is already a register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5071
d3c12306 5072/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
b7e52782 5073 failed and the caller should emit a normal call. */
d5457140 5074
28f4ec01 5075static rtx
b7e52782 5076expand_builtin_alloca (tree exp)
28f4ec01
BS
5077{
5078 rtx op0;
d5457140 5079 rtx result;
13e49da9 5080 unsigned int align;
8bd9f164 5081 tree fndecl = get_callee_fndecl (exp);
9e878cf1
EB
5082 HOST_WIDE_INT max_size;
5083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
b7e52782 5084 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
8bd9f164 5085 bool valid_arglist
9e878cf1
EB
5086 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5087 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5088 VOID_TYPE)
5089 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5090 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5091 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
13e49da9
TV
5092
5093 if (!valid_arglist)
5039610b 5094 return NULL_RTX;
28f4ec01 5095
00abf86c
MS
5096 if ((alloca_for_var
5097 && warn_vla_limit >= HOST_WIDE_INT_MAX
5098 && warn_alloc_size_limit < warn_vla_limit)
5099 || (!alloca_for_var
5100 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5101 && warn_alloc_size_limit < warn_alloca_limit
5102 ))
8bd9f164 5103 {
00abf86c
MS
5104 /* -Walloca-larger-than and -Wvla-larger-than settings of
5105 less than HOST_WIDE_INT_MAX override the more general
5106 -Walloc-size-larger-than so unless either of the former
5107 options is smaller than the last one (wchich would imply
5108 that the call was already checked), check the alloca
5109 arguments for overflow. */
8bd9f164
MS
5110 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5111 int idx[] = { 0, -1 };
5112 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5113 }
5114
28f4ec01 5115 /* Compute the argument. */
5039610b 5116 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
28f4ec01 5117
13e49da9 5118 /* Compute the alignment. */
9e878cf1
EB
5119 align = (fcode == BUILT_IN_ALLOCA
5120 ? BIGGEST_ALIGNMENT
5121 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5122
5123 /* Compute the maximum size. */
5124 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5125 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5126 : -1);
13e49da9 5127
b7e52782
EB
5128 /* Allocate the desired space. If the allocation stems from the declaration
5129 of a variable-sized object, it cannot accumulate. */
9e878cf1
EB
5130 result
5131 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5ae6cd0d 5132 result = convert_memory_address (ptr_mode, result);
d5457140
RK
5133
5134 return result;
28f4ec01
BS
5135}
5136
7504c3bf
JJ
5137/* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5138 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5139 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5140 handle_builtin_stack_restore function. */
e3174bdf
MO
5141
5142static rtx
5143expand_asan_emit_allocas_unpoison (tree exp)
5144{
5145 tree arg0 = CALL_EXPR_ARG (exp, 0);
7504c3bf 5146 tree arg1 = CALL_EXPR_ARG (exp, 1);
8f4956ca 5147 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7504c3bf
JJ
5148 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5149 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5150 stack_pointer_rtx, NULL_RTX, 0,
5151 OPTAB_LIB_WIDEN);
5152 off = convert_modes (ptr_mode, Pmode, off, 0);
5153 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5154 OPTAB_LIB_WIDEN);
e3174bdf 5155 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
db69559b
RS
5156 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5157 top, ptr_mode, bot, ptr_mode);
e3174bdf
MO
5158 return ret;
5159}
5160
ac868f29
EB
5161/* Expand a call to bswap builtin in EXP.
5162 Return NULL_RTX if a normal call should be emitted rather than expanding the
5163 function in-line. If convenient, the result should be placed in TARGET.
5164 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
5165
5166static rtx
ef4bddc2 5167expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 5168 rtx subtarget)
167fa32c 5169{
167fa32c
EC
5170 tree arg;
5171 rtx op0;
5172
5039610b
SL
5173 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5174 return NULL_RTX;
167fa32c 5175
5039610b 5176 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
5177 op0 = expand_expr (arg,
5178 subtarget && GET_MODE (subtarget) == target_mode
5179 ? subtarget : NULL_RTX,
5180 target_mode, EXPAND_NORMAL);
5181 if (GET_MODE (op0) != target_mode)
5182 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 5183
ac868f29 5184 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
5185
5186 gcc_assert (target);
5187
ac868f29 5188 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
5189}
5190
5039610b
SL
5191/* Expand a call to a unary builtin in EXP.
5192 Return NULL_RTX if a normal call should be emitted rather than expanding the
28f4ec01
BS
5193 function in-line. If convenient, the result should be placed in TARGET.
5194 SUBTARGET may be used as the target for computing one of EXP's operands. */
d5457140 5195
28f4ec01 5196static rtx
ef4bddc2 5197expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4682ae04 5198 rtx subtarget, optab op_optab)
28f4ec01
BS
5199{
5200 rtx op0;
5039610b
SL
5201
5202 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5203 return NULL_RTX;
28f4ec01
BS
5204
5205 /* Compute the argument. */
4359dc2a
JJ
5206 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5207 (subtarget
5208 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5209 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
49452c07 5210 VOIDmode, EXPAND_NORMAL);
2928cd7a 5211 /* Compute op, into TARGET if possible.
28f4ec01 5212 Set TARGET to wherever the result comes back. */
5039610b 5213 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
146aef0b 5214 op_optab, op0, target, op_optab != clrsb_optab);
298e6adc 5215 gcc_assert (target);
5906d013 5216
6c537d03 5217 return convert_to_mode (target_mode, target, 0);
28f4ec01 5218}
994a57cd 5219
b8698a0f 5220/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
5221 as the builtin_expect semantic should've been already executed by
5222 tree branch prediction pass. */
994a57cd
RH
5223
5224static rtx
5039610b 5225expand_builtin_expect (tree exp, rtx target)
994a57cd 5226{
451409e4 5227 tree arg;
994a57cd 5228
5039610b 5229 if (call_expr_nargs (exp) < 2)
994a57cd 5230 return const0_rtx;
5039610b 5231 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 5232
5039610b 5233 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 5234 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 5235 gcc_assert (!flag_guess_branch_prob
1da2ed5f 5236 || optimize == 0 || seen_error ());
994a57cd
RH
5237 return target;
5238}
5f2d6cfa 5239
45d439ac
JJ
5240/* Expand a call to __builtin_assume_aligned. We just return our first
5241 argument as the builtin_assume_aligned semantic should've been already
5242 executed by CCP. */
5243
5244static rtx
5245expand_builtin_assume_aligned (tree exp, rtx target)
5246{
5247 if (call_expr_nargs (exp) < 2)
5248 return const0_rtx;
5249 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5250 EXPAND_NORMAL);
5251 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5252 && (call_expr_nargs (exp) < 3
5253 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5254 return target;
5255}
5256
1e188d1e 5257void
4682ae04 5258expand_builtin_trap (void)
9602f5a0 5259{
eb6f47fb 5260 if (targetm.have_trap ())
206604dc 5261 {
eb6f47fb 5262 rtx_insn *insn = emit_insn (targetm.gen_trap ());
206604dc
JJ
5263 /* For trap insns when not accumulating outgoing args force
5264 REG_ARGS_SIZE note to prevent crossjumping of calls with
5265 different args sizes. */
5266 if (!ACCUMULATE_OUTGOING_ARGS)
68184180 5267 add_args_size_note (insn, stack_pointer_delta);
206604dc 5268 }
9602f5a0 5269 else
ee516de9
EB
5270 {
5271 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5272 tree call_expr = build_call_expr (fn, 0);
5273 expand_call (call_expr, NULL_RTX, false);
5274 }
5275
9602f5a0
RH
5276 emit_barrier ();
5277}
075ec276 5278
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5289
5039610b
SL
5290/* Expand EXP, a call to fabs, fabsf or fabsl.
5291 Return NULL_RTX if a normal call should be emitted rather than expanding
075ec276
RS
5292 the function inline. If convenient, the result should be placed
5293 in TARGET. SUBTARGET may be used as the target for computing
5294 the operand. */
5295
5296static rtx
5039610b 5297expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
075ec276 5298{
ef4bddc2 5299 machine_mode mode;
075ec276
RS
5300 tree arg;
5301 rtx op0;
5302
5039610b
SL
5303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5304 return NULL_RTX;
075ec276 5305
5039610b 5306 arg = CALL_EXPR_ARG (exp, 0);
4cd8e76f 5307 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
075ec276 5308 mode = TYPE_MODE (TREE_TYPE (arg));
49452c07 5309 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
075ec276
RS
5310 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5311}
5312
5039610b 5313/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
5314 Return NULL is a normal call should be emitted rather than expanding the
5315 function inline. If convenient, the result should be placed in TARGET.
5316 SUBTARGET may be used as the target for computing the operand. */
5317
5318static rtx
5039610b 5319expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
5320{
5321 rtx op0, op1;
5322 tree arg;
5323
5039610b
SL
5324 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5325 return NULL_RTX;
046625fa 5326
5039610b 5327 arg = CALL_EXPR_ARG (exp, 0);
84217346 5328 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 5329
5039610b 5330 arg = CALL_EXPR_ARG (exp, 1);
84217346 5331 op1 = expand_normal (arg);
046625fa
RH
5332
5333 return expand_copysign (op0, op1, target);
5334}
5335
677feb77
DD
5336/* Expand a call to __builtin___clear_cache. */
5337
5338static rtx
f2cf13bd 5339expand_builtin___clear_cache (tree exp)
677feb77 5340{
f2cf13bd
RS
5341 if (!targetm.code_for_clear_cache)
5342 {
677feb77 5343#ifdef CLEAR_INSN_CACHE
f2cf13bd
RS
5344 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5345 does something. Just do the default expansion to a call to
5346 __clear_cache(). */
5347 return NULL_RTX;
677feb77 5348#else
f2cf13bd
RS
5349 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5350 does nothing. There is no need to call it. Do nothing. */
5351 return const0_rtx;
677feb77 5352#endif /* CLEAR_INSN_CACHE */
f2cf13bd
RS
5353 }
5354
677feb77
DD
5355 /* We have a "clear_cache" insn, and it will handle everything. */
5356 tree begin, end;
5357 rtx begin_rtx, end_rtx;
677feb77
DD
5358
5359 /* We must not expand to a library call. If we did, any
5360 fallback library function in libgcc that might contain a call to
5361 __builtin___clear_cache() would recurse infinitely. */
5362 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5363 {
5364 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5365 return const0_rtx;
5366 }
5367
f2cf13bd 5368 if (targetm.have_clear_cache ())
677feb77 5369 {
a5c7d693 5370 struct expand_operand ops[2];
677feb77
DD
5371
5372 begin = CALL_EXPR_ARG (exp, 0);
5373 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77
DD
5374
5375 end = CALL_EXPR_ARG (exp, 1);
5376 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 5377
a5c7d693
RS
5378 create_address_operand (&ops[0], begin_rtx);
5379 create_address_operand (&ops[1], end_rtx);
f2cf13bd 5380 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
a5c7d693 5381 return const0_rtx;
677feb77
DD
5382 }
5383 return const0_rtx;
677feb77
DD
5384}
5385
6de9cd9a
DN
5386/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5387
5388static rtx
5389round_trampoline_addr (rtx tramp)
5390{
5391 rtx temp, addend, mask;
5392
5393 /* If we don't need too much alignment, we'll have been guaranteed
5394 proper alignment by get_trampoline_type. */
5395 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5396 return tramp;
5397
5398 /* Round address up to desired boundary. */
5399 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
5400 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5401 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
5402
5403 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5404 temp, 0, OPTAB_LIB_WIDEN);
5405 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5406 temp, 0, OPTAB_LIB_WIDEN);
5407
5408 return tramp;
5409}
5410
5411static rtx
183dd130 5412expand_builtin_init_trampoline (tree exp, bool onstack)
6de9cd9a
DN
5413{
5414 tree t_tramp, t_func, t_chain;
531ca746 5415 rtx m_tramp, r_tramp, r_chain, tmp;
6de9cd9a 5416
5039610b 5417 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6de9cd9a
DN
5418 POINTER_TYPE, VOID_TYPE))
5419 return NULL_RTX;
5420
5039610b
SL
5421 t_tramp = CALL_EXPR_ARG (exp, 0);
5422 t_func = CALL_EXPR_ARG (exp, 1);
5423 t_chain = CALL_EXPR_ARG (exp, 2);
6de9cd9a 5424
84217346 5425 r_tramp = expand_normal (t_tramp);
531ca746
RH
5426 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5427 MEM_NOTRAP_P (m_tramp) = 1;
5428
183dd130
ILT
5429 /* If ONSTACK, the TRAMP argument should be the address of a field
5430 within the local function's FRAME decl. Either way, let's see if
5431 we can fill in the MEM_ATTRs for this memory. */
531ca746 5432 if (TREE_CODE (t_tramp) == ADDR_EXPR)
ad2e5b71 5433 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
531ca746 5434
183dd130
ILT
5435 /* Creator of a heap trampoline is responsible for making sure the
5436 address is aligned to at least STACK_BOUNDARY. Normally malloc
5437 will ensure this anyhow. */
531ca746
RH
5438 tmp = round_trampoline_addr (r_tramp);
5439 if (tmp != r_tramp)
5440 {
5441 m_tramp = change_address (m_tramp, BLKmode, tmp);
5442 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
f5541398 5443 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
531ca746
RH
5444 }
5445
5446 /* The FUNC argument should be the address of the nested function.
5447 Extract the actual function decl to pass to the hook. */
5448 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5449 t_func = TREE_OPERAND (t_func, 0);
5450 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5451
84217346 5452 r_chain = expand_normal (t_chain);
6de9cd9a
DN
5453
5454 /* Generate insns to initialize the trampoline. */
531ca746 5455 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6de9cd9a 5456
183dd130
ILT
5457 if (onstack)
5458 {
5459 trampolines_created = 1;
8ffadef9 5460
4c640e26
EB
5461 if (targetm.calls.custom_function_descriptors != 0)
5462 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5463 "trampoline generated for nested function %qD", t_func);
183dd130 5464 }
8ffadef9 5465
6de9cd9a
DN
5466 return const0_rtx;
5467}
5468
5469static rtx
5039610b 5470expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
5471{
5472 rtx tramp;
5473
5039610b 5474 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
5475 return NULL_RTX;
5476
5039610b 5477 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 5478 tramp = round_trampoline_addr (tramp);
531ca746
RH
5479 if (targetm.calls.trampoline_adjust_address)
5480 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
5481
5482 return tramp;
5483}
5484
4c640e26
EB
5485/* Expand a call to the builtin descriptor initialization routine.
5486 A descriptor is made up of a couple of pointers to the static
5487 chain and the code entry in this order. */
5488
5489static rtx
5490expand_builtin_init_descriptor (tree exp)
5491{
5492 tree t_descr, t_func, t_chain;
5493 rtx m_descr, r_descr, r_func, r_chain;
5494
5495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5496 VOID_TYPE))
5497 return NULL_RTX;
5498
5499 t_descr = CALL_EXPR_ARG (exp, 0);
5500 t_func = CALL_EXPR_ARG (exp, 1);
5501 t_chain = CALL_EXPR_ARG (exp, 2);
5502
5503 r_descr = expand_normal (t_descr);
5504 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5505 MEM_NOTRAP_P (m_descr) = 1;
5506
5507 r_func = expand_normal (t_func);
5508 r_chain = expand_normal (t_chain);
5509
5510 /* Generate insns to initialize the descriptor. */
5511 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5512 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5513 POINTER_SIZE / BITS_PER_UNIT), r_func);
5514
5515 return const0_rtx;
5516}
5517
5518/* Expand a call to the builtin descriptor adjustment routine. */
5519
5520static rtx
5521expand_builtin_adjust_descriptor (tree exp)
5522{
5523 rtx tramp;
5524
5525 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5526 return NULL_RTX;
5527
5528 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5529
5530 /* Unalign the descriptor to allow runtime identification. */
5531 tramp = plus_constant (ptr_mode, tramp,
5532 targetm.calls.custom_function_descriptors);
5533
5534 return force_operand (tramp, NULL_RTX);
5535}
5536
0f67fa83
WG
5537/* Expand the call EXP to the built-in signbit, signbitf or signbitl
5538 function. The function first checks whether the back end provides
5539 an insn to implement signbit for the respective mode. If not, it
5540 checks whether the floating point format of the value is such that
61717a45
FXC
5541 the sign bit can be extracted. If that is not the case, error out.
5542 EXP is the expression that is a call to the builtin function; if
5543 convenient, the result should be placed in TARGET. */
ef79730c
RS
5544static rtx
5545expand_builtin_signbit (tree exp, rtx target)
5546{
5547 const struct real_format *fmt;
b5f2d801 5548 scalar_float_mode fmode;
095a2d76 5549 scalar_int_mode rmode, imode;
5039610b 5550 tree arg;
e4fbead1 5551 int word, bitpos;
d0c9d431 5552 enum insn_code icode;
ef79730c 5553 rtx temp;
db3927fb 5554 location_t loc = EXPR_LOCATION (exp);
ef79730c 5555
5039610b
SL
5556 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5557 return NULL_RTX;
ef79730c 5558
5039610b 5559 arg = CALL_EXPR_ARG (exp, 0);
b5f2d801 5560 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
7a504f33 5561 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
ef79730c
RS
5562 fmt = REAL_MODE_FORMAT (fmode);
5563
0f67fa83
WG
5564 arg = builtin_save_expr (arg);
5565
5566 /* Expand the argument yielding a RTX expression. */
5567 temp = expand_normal (arg);
5568
5569 /* Check if the back end provides an insn that handles signbit for the
5570 argument's mode. */
947131ba 5571 icode = optab_handler (signbit_optab, fmode);
d0c9d431 5572 if (icode != CODE_FOR_nothing)
0f67fa83 5573 {
58f4cf2a 5574 rtx_insn *last = get_last_insn ();
0f67fa83 5575 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8a0b1aa4
MM
5576 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5577 return target;
5578 delete_insns_since (last);
0f67fa83
WG
5579 }
5580
ef79730c
RS
5581 /* For floating point formats without a sign bit, implement signbit
5582 as "ARG < 0.0". */
b87a0206 5583 bitpos = fmt->signbit_ro;
e4fbead1 5584 if (bitpos < 0)
ef79730c
RS
5585 {
5586 /* But we can't do this if the format supports signed zero. */
61717a45 5587 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
ef79730c 5588
db3927fb 5589 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
987b67bc 5590 build_real (TREE_TYPE (arg), dconst0));
ef79730c
RS
5591 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5592 }
5593
e4fbead1 5594 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
ef79730c 5595 {
304b9962 5596 imode = int_mode_for_mode (fmode).require ();
e4fbead1 5597 temp = gen_lowpart (imode, temp);
254878ea
RS
5598 }
5599 else
5600 {
e4fbead1
RS
5601 imode = word_mode;
5602 /* Handle targets with different FP word orders. */
5603 if (FLOAT_WORDS_BIG_ENDIAN)
c22cacf3 5604 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
e4fbead1 5605 else
c22cacf3 5606 word = bitpos / BITS_PER_WORD;
e4fbead1
RS
5607 temp = operand_subword_force (temp, word, fmode);
5608 bitpos = bitpos % BITS_PER_WORD;
5609 }
5610
210e1852
RS
5611 /* Force the intermediate word_mode (or narrower) result into a
5612 register. This avoids attempting to create paradoxical SUBREGs
5613 of floating point modes below. */
5614 temp = force_reg (imode, temp);
5615
e4fbead1
RS
5616 /* If the bitpos is within the "result mode" lowpart, the operation
5617 can be implement with a single bitwise AND. Otherwise, we need
5618 a right shift and an AND. */
5619
5620 if (bitpos < GET_MODE_BITSIZE (rmode))
5621 {
807e902e 5622 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
ef79730c 5623
515e442a 5624 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
e4fbead1 5625 temp = gen_lowpart (rmode, temp);
254878ea 5626 temp = expand_binop (rmode, and_optab, temp,
807e902e 5627 immed_wide_int_const (mask, rmode),
e4fbead1 5628 NULL_RTX, 1, OPTAB_LIB_WIDEN);
ef79730c 5629 }
e4fbead1
RS
5630 else
5631 {
5632 /* Perform a logical right shift to place the signbit in the least
c22cacf3 5633 significant bit, then truncate the result to the desired mode
e4fbead1 5634 and mask just this bit. */
eb6c3df1 5635 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
e4fbead1
RS
5636 temp = gen_lowpart (rmode, temp);
5637 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5638 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5639 }
5640
ef79730c
RS
5641 return temp;
5642}
d1c38823
ZD
5643
5644/* Expand fork or exec calls. TARGET is the desired target of the
5039610b 5645 call. EXP is the call. FN is the
d1c38823
ZD
5646 identificator of the actual function. IGNORE is nonzero if the
5647 value is to be ignored. */
5648
5649static rtx
5039610b 5650expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
d1c38823
ZD
5651{
5652 tree id, decl;
5653 tree call;
5654
5655 /* If we are not profiling, just call the function. */
5656 if (!profile_arc_flag)
5657 return NULL_RTX;
5658
5659 /* Otherwise call the wrapper. This should be equivalent for the rest of
5660 compiler, so the code does not diverge, and the wrapper may run the
2b8a92de 5661 code necessary for keeping the profiling sane. */
d1c38823
ZD
5662
5663 switch (DECL_FUNCTION_CODE (fn))
5664 {
5665 case BUILT_IN_FORK:
5666 id = get_identifier ("__gcov_fork");
5667 break;
5668
5669 case BUILT_IN_EXECL:
5670 id = get_identifier ("__gcov_execl");
5671 break;
5672
5673 case BUILT_IN_EXECV:
5674 id = get_identifier ("__gcov_execv");
5675 break;
5676
5677 case BUILT_IN_EXECLP:
5678 id = get_identifier ("__gcov_execlp");
5679 break;
5680
5681 case BUILT_IN_EXECLE:
5682 id = get_identifier ("__gcov_execle");
5683 break;
5684
5685 case BUILT_IN_EXECVP:
5686 id = get_identifier ("__gcov_execvp");
5687 break;
5688
5689 case BUILT_IN_EXECVE:
5690 id = get_identifier ("__gcov_execve");
5691 break;
5692
5693 default:
298e6adc 5694 gcc_unreachable ();
d1c38823
ZD
5695 }
5696
c2255bc4
AH
5697 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5698 FUNCTION_DECL, id, TREE_TYPE (fn));
d1c38823
ZD
5699 DECL_EXTERNAL (decl) = 1;
5700 TREE_PUBLIC (decl) = 1;
5701 DECL_ARTIFICIAL (decl) = 1;
5702 TREE_NOTHROW (decl) = 1;
ac382b62
JM
5703 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5704 DECL_VISIBILITY_SPECIFIED (decl) = 1;
db3927fb 5705 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
d1c38823 5706 return expand_call (call, target, ignore);
5039610b 5707 }
b8698a0f 5708
48ae6c13
RH
5709
5710\f
02ee605c
RH
5711/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5712 the pointer in these functions is void*, the tree optimizers may remove
5713 casts. The mode computed in expand_builtin isn't reliable either, due
5714 to __sync_bool_compare_and_swap.
5715
5716 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5717 group of builtins. This gives us log2 of the mode size. */
5718
ef4bddc2 5719static inline machine_mode
02ee605c
RH
5720get_builtin_sync_mode (int fcode_diff)
5721{
2de0aa52
HPN
5722 /* The size is not negotiable, so ask not to get BLKmode in return
5723 if the target indicates that a smaller size would be better. */
f4b31647 5724 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
02ee605c
RH
5725}
5726
1387fef3
AS
5727/* Expand the memory expression LOC and return the appropriate memory operand
5728 for the builtin_sync operations. */
5729
5730static rtx
ef4bddc2 5731get_builtin_sync_mem (tree loc, machine_mode mode)
1387fef3
AS
5732{
5733 rtx addr, mem;
5734
f46835f5
JJ
5735 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5736 addr = convert_memory_address (Pmode, addr);
1387fef3
AS
5737
5738 /* Note that we explicitly do not want any alias information for this
5739 memory, so that we kill all other live memories. Otherwise we don't
5740 satisfy the full barrier semantics of the intrinsic. */
5741 mem = validize_mem (gen_rtx_MEM (mode, addr));
5742
1be38ccb
RG
5743 /* The alignment needs to be at least according to that of the mode. */
5744 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
0eb77834 5745 get_pointer_alignment (loc)));
9cd9e512 5746 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
1387fef3
AS
5747 MEM_VOLATILE_P (mem) = 1;
5748
5749 return mem;
5750}
5751
86951993
AM
5752/* Make sure an argument is in the right mode.
5753 EXP is the tree argument.
5754 MODE is the mode it should be in. */
5755
5756static rtx
ef4bddc2 5757expand_expr_force_mode (tree exp, machine_mode mode)
86951993
AM
5758{
5759 rtx val;
ef4bddc2 5760 machine_mode old_mode;
86951993
AM
5761
5762 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5763 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5764 of CONST_INTs, where we know the old_mode only from the call argument. */
5765
5766 old_mode = GET_MODE (val);
5767 if (old_mode == VOIDmode)
5768 old_mode = TYPE_MODE (TREE_TYPE (exp));
5769 val = convert_modes (mode, old_mode, val, 1);
5770 return val;
5771}
5772
5773
48ae6c13 5774/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5039610b 5775 EXP is the CALL_EXPR. CODE is the rtx code
48ae6c13
RH
5776 that corresponds to the arithmetic or logical operation from the name;
5777 an exception here is that NOT actually means NAND. TARGET is an optional
5778 place for us to store the results; AFTER is true if this is the
86951993 5779 fetch_and_xxx form. */
48ae6c13
RH
5780
5781static rtx
ef4bddc2 5782expand_builtin_sync_operation (machine_mode mode, tree exp,
02ee605c 5783 enum rtx_code code, bool after,
86951993 5784 rtx target)
48ae6c13 5785{
1387fef3 5786 rtx val, mem;
c2255bc4 5787 location_t loc = EXPR_LOCATION (exp);
48ae6c13 5788
23462d4d
UB
5789 if (code == NOT && warn_sync_nand)
5790 {
5791 tree fndecl = get_callee_fndecl (exp);
5792 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5793
5794 static bool warned_f_a_n, warned_n_a_f;
5795
5796 switch (fcode)
5797 {
e0a8ecf2
AM
5798 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5799 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5800 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5801 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5802 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
23462d4d
UB
5803 if (warned_f_a_n)
5804 break;
5805
e79983f4 5806 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
c2255bc4 5807 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
5808 warned_f_a_n = true;
5809 break;
5810
e0a8ecf2
AM
5811 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5812 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5813 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5814 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5815 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
23462d4d
UB
5816 if (warned_n_a_f)
5817 break;
5818
e79983f4 5819 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
c2255bc4 5820 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
5821 warned_n_a_f = true;
5822 break;
5823
5824 default:
5825 gcc_unreachable ();
5826 }
5827 }
5828
48ae6c13 5829 /* Expand the operands. */
5039610b 5830 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993 5831 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
48ae6c13 5832
46b35980 5833 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
86951993 5834 after);
48ae6c13
RH
5835}
5836
5837/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 5838 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
5839 true if this is the boolean form. TARGET is a place for us to store the
5840 results; this is NOT optional if IS_BOOL is true. */
5841
5842static rtx
ef4bddc2 5843expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 5844 bool is_bool, rtx target)
48ae6c13 5845{
1387fef3 5846 rtx old_val, new_val, mem;
f0409b19 5847 rtx *pbool, *poval;
48ae6c13
RH
5848
5849 /* Expand the operands. */
5039610b 5850 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
5851 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5852 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 5853
f0409b19
RH
5854 pbool = poval = NULL;
5855 if (target != const0_rtx)
5856 {
5857 if (is_bool)
5858 pbool = &target;
5859 else
5860 poval = &target;
5861 }
5862 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
5863 false, MEMMODEL_SYNC_SEQ_CST,
5864 MEMMODEL_SYNC_SEQ_CST))
86951993 5865 return NULL_RTX;
5039610b 5866
86951993 5867 return target;
48ae6c13
RH
5868}
5869
5870/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5871 general form is actually an atomic exchange, and some targets only
5872 support a reduced form with the second argument being a constant 1.
b8698a0f 5873 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 5874 the results. */
48ae6c13
RH
5875
5876static rtx
ef4bddc2 5877expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 5878 rtx target)
48ae6c13 5879{
1387fef3 5880 rtx val, mem;
48ae6c13
RH
5881
5882 /* Expand the operands. */
5039610b 5883 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
5884 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5885
744accb2 5886 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
5887}
5888
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* A lock release is a store of zero with SYNC release ordering.
     NOTE(review): the final TRUE argument presumably enables the
     single-threaded fallback path in expand_atomic_store (see the
     comment in expand_builtin_atomic_clear) — confirm there.  */
  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
5901
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  /* Attribute warnings to the expansion point when the builtin appears
     in a system header (e.g. inside a macro from <stdatomic.h>).  */
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Targets may define extra bits (an "architecture specifier") above
     MEMMODEL_MASK; let the target validate those if it can.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
5945
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  /* Validate the memory model first so the diagnostic is emitted even
     when we end up not inlining.  */
  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* NULL_RTX tells the caller to fall back (presumably to a library
     call) when inlining of atomics is disabled.  */
  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
5968
/* Expand the __atomic_compare_exchange intrinsic:
	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering may not be stronger than the success ordering,
     and may not be a release-flavored model.  Degrade to SEQ_CST with a
     warning rather than erroring out.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is passed by reference; dereference it to a MEM in MODE.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6050
849a76a5
JJ
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the expected value into a stack temporary so its address can
     be passed to the library routine.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      /* The LHS is a complex pair: success flag in the real part, the
	 (possibly updated) expected value in the imaginary part.  */
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6100
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in the low byte and the weak flag
     in bit 8 (see the "& 255" here and the "& 256" below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Same memory-model validation as expand_builtin_atomic_compare_exchange:
     degrade invalid combinations to SEQ_CST with a warning.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  /* If no inline sequence exists, fall back to the library call.  */
  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* The LHS is a complex pair: success flag in the real part, the old
     value in the imaginary part.  */
  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6170
86951993
AM
/* Expand the __atomic_load intrinsic:
	TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  /* A load may not use a release-flavored ordering.  */
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
6200
6201
/* Expand the __atomic_store intrinsic:
	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.  The builtin returns no value, so there is no
   TARGET parameter.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* A store may only use relaxed, release, or seq-cst ordering.  */
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
6233
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     NOTE(review): EXT_CALL presumably returns the pre-operation value,
     so the operation is re-applied here to produce the post-op result;
     NAND (CODE == NOT) is corrected as ~(ret & val).  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6308
adedd5c1
JJ
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional fourth argument carries an explicit memory model;
     otherwise SYNC seq-cst is assumed.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function onto the RMW opcode and target optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: emit a plain atomic RMW with the mask 1 << VAL,
	 inverted for the AND (reset) case.  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the direct bit-test-and-* insn pattern first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: do a fetch-op with the shifted mask, then extract the
     tested bit from the fetched value.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* FLAG == 1: reduce the result to the single tested bit (0/1).  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6392
d660c35e
AM
/* Expand an atomic clear operation.
	void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_clear (tree exp)
{
  machine_mode mode;
  rtx mem, ret;
  enum memmodel model;

  /* The object is a bool; use the integer mode of matching size.  */
  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  /* A clear is a store, so acquire-flavored orderings are invalid.  */
  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
  if (!ret)
    emit_move_insn (mem, const0_rtx);
  return const0_rtx;
}
6427
/* Expand an atomic test_and_set operation.
	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  TARGET is an optional place for the
   boolean result.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  /* The object is a bool; use the integer mode of matching size.  */
  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
6445
6446
86951993
AM
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node/boolean_false_node, or NULL_TREE when ARG0 is
   not a compile-time constant.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  The lowest set bit gives the
	 guaranteed alignment in bytes.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6517
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

static rtx
expand_builtin_atomic_always_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  /* "always" requires a compile-time answer, hence a constant size.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    {
      error ("non-constant argument 1 to __atomic_always_lock_free");
      return const0_rtx;
    }

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;
  return const0_rtx;
}
6543
6544/* Return a one or zero if it can be determined that object ARG1 of size ARG
6545 is lock free on this architecture. */
6546
6547static tree
6548fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6549{
6550 if (!flag_inline_atomics)
6551 return NULL_TREE;
6552
6553 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
6554 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6555 return boolean_true_node;
86951993
AM
6556
6557 return NULL_TREE;
6558}
6559
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */

static rtx
expand_builtin_atomic_is_lock_free (tree exp)
{
  tree size;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
    {
      error ("non-integer argument 1 to __atomic_is_lock_free");
      return NULL_RTX;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
    return const1_rtx;

  /* Unknown at compile time; NULL_RTX leaves the decision to the caller
     (presumably a runtime library call).  */
  return NULL_RTX;
}
6590
86951993
AM
/* Expand the __atomic_thread_fence intrinsic:
	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  /* Validate the model, then emit an inter-thread memory fence.  */
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}
6601
/* Expand the __atomic_signal_fence intrinsic:
	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  /* Validate the model, then emit a compiler-level (signal) fence.  */
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}
6612
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  /* __sync primitives use the SYNC variant of seq-cst ordering.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6620
f959607b
CLT
/* Expand the __builtin_thread_pointer intrinsic.  EXP is the CALL_EXPR;
   TARGET is an optional place for the result.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
6643
/* Expand the __builtin_set_thread_pointer intrinsic.  EXP is the
   CALL_EXPR; its single argument is the new thread-pointer value.  */

static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
6662
28f4ec01 6663\f
862d0b35
DN
/* Emit code to restore the current value of stack.  VAR holds the saved
   stack pointer (as produced by a matching expand_stack_save).  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  /* Keep REG_ARGS_SIZE notes consistent across the emitted insns.  */
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
6681
862d0b35
DN
/* Emit code to save the current value of stack.  Returns an rtx holding
   the saved stack pointer.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
6692
1f62d637
TV
/* Emit code to get the openacc gang, worker or vector id or size.
   EXP is the CALL_EXPR; TARGET is an optional result location; IGNORE
   is nonzero when the result is unused.  */

static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  /* The argument selects the parallelism dimension being queried.  */
  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  /* Without target support, fall back to id 0 / size 1.  */
  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The dim insns want a register destination; copy through one if the
     target is memory.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
41dbbb37 6761
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string
   (1 for the first argument, 2 for the second);
   MODE is the mode of the comparison result.

   to: (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* Keep the operands in source-argument order so the sign of the
	 difference matches the library semantics.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to MODE and subtract.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* Branch out on the first differing byte; the last byte needs no
	 branch since its difference is the final result either way.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
6831
10a0e2a9 6832/* Inline expansion a call to str(n)cmp, with result going to
b2272b13
QZ
6833 TARGET if that's convenient.
6834 If the call is not been inlined, return NULL_RTX. */
6835static rtx
523a59ff 6836inline_expand_builtin_string_cmp (tree exp, rtx target)
b2272b13
QZ
6837{
6838 tree fndecl = get_callee_fndecl (exp);
6839 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6840 unsigned HOST_WIDE_INT length = 0;
6841 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6842
6843 gcc_checking_assert (fcode == BUILT_IN_STRCMP
10a0e2a9 6844 || fcode == BUILT_IN_STRNCMP
b2272b13
QZ
6845 || fcode == BUILT_IN_MEMCMP);
6846
523a59ff
QZ
6847 /* On a target where the type of the call (int) has same or narrower presicion
6848 than unsigned char, give up the inlining expansion. */
6849 if (TYPE_PRECISION (unsigned_char_type_node)
6850 >= TYPE_PRECISION (TREE_TYPE (exp)))
6851 return NULL_RTX;
6852
b2272b13
QZ
6853 tree arg1 = CALL_EXPR_ARG (exp, 0);
6854 tree arg2 = CALL_EXPR_ARG (exp, 1);
6855 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6856
6857 unsigned HOST_WIDE_INT len1 = 0;
6858 unsigned HOST_WIDE_INT len2 = 0;
6859 unsigned HOST_WIDE_INT len3 = 0;
6860
6861 const char *src_str1 = c_getstr (arg1, &len1);
6862 const char *src_str2 = c_getstr (arg2, &len2);
10a0e2a9 6863
b2272b13
QZ
6864 /* If neither strings is constant string, the call is not qualify. */
6865 if (!src_str1 && !src_str2)
6866 return NULL_RTX;
6867
6868 /* For strncmp, if the length is not a const, not qualify. */
6869 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6870 return NULL_RTX;
6871
6872 int const_str_n = 0;
6873 if (!len1)
6874 const_str_n = 2;
6875 else if (!len2)
6876 const_str_n = 1;
6877 else if (len2 > len1)
6878 const_str_n = 1;
6879 else
6880 const_str_n = 2;
6881
6882 gcc_checking_assert (const_str_n > 0);
6883 length = (const_str_n == 1) ? len1 : len2;
6884
6885 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6886 length = len3;
6887
10a0e2a9 6888 /* If the length of the comparision is larger than the threshold,
b2272b13 6889 do nothing. */
10a0e2a9 6890 if (length > (unsigned HOST_WIDE_INT)
b2272b13
QZ
6891 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6892 return NULL_RTX;
6893
6894 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6895
6896 /* Now, start inline expansion the call. */
10a0e2a9 6897 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
b2272b13 6898 (const_str_n == 1) ? src_str1 : src_str2, length,
523a59ff 6899 const_str_n, mode);
b2272b13
QZ
6900}
6901
28f4ec01
BS
6902/* Expand an expression EXP that calls a built-in function,
6903 with result going to TARGET if that's convenient
6904 (and in mode MODE if that's convenient).
6905 SUBTARGET may be used as the target for computing one of EXP's operands.
6906 IGNORE is nonzero if the value is to be ignored. */
6907
6908rtx
ef4bddc2 6909expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 6910 int ignore)
28f4ec01 6911{
2f503025 6912 tree fndecl = get_callee_fndecl (exp);
28f4ec01 6913 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
ef4bddc2 6914 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 6915 int flags;
28f4ec01 6916
d51151b2
JJ
6917 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6918 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6919
bdea98ca
MO
6920 /* When ASan is enabled, we don't want to expand some memory/string
6921 builtins and rely on libsanitizer's hooks. This allows us to avoid
6922 redundant checks and be sure, that possible overflow will be detected
6923 by ASan. */
6924
6925 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6926 return expand_call (exp, target, ignore);
6927
28f4ec01
BS
6928 /* When not optimizing, generate calls to library functions for a certain
6929 set of builtins. */
d25225de 6930 if (!optimize
48ae6c13 6931 && !called_as_built_in (fndecl)
63bf9a90
JH
6932 && fcode != BUILT_IN_FORK
6933 && fcode != BUILT_IN_EXECL
6934 && fcode != BUILT_IN_EXECV
6935 && fcode != BUILT_IN_EXECLP
6936 && fcode != BUILT_IN_EXECLE
6937 && fcode != BUILT_IN_EXECVP
6938 && fcode != BUILT_IN_EXECVE
9e878cf1 6939 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 6940 && fcode != BUILT_IN_FREE)
d25225de 6941 return expand_call (exp, target, ignore);
28f4ec01 6942
0a45ec5c
RS
6943 /* The built-in function expanders test for target == const0_rtx
6944 to determine whether the function's result will be ignored. */
6945 if (ignore)
6946 target = const0_rtx;
6947
6948 /* If the result of a pure or const built-in function is ignored, and
6949 none of its arguments are volatile, we can avoid expanding the
6950 built-in call and just evaluate the arguments for side-effects. */
6951 if (target == const0_rtx
9e3920e9
JJ
6952 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6953 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
6954 {
6955 bool volatilep = false;
6956 tree arg;
5039610b 6957 call_expr_arg_iterator iter;
0a45ec5c 6958
5039610b
SL
6959 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6960 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
6961 {
6962 volatilep = true;
6963 break;
6964 }
6965
6966 if (! volatilep)
6967 {
5039610b
SL
6968 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6969 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
6970 return const0_rtx;
6971 }
6972 }
6973
28f4ec01
BS
6974 switch (fcode)
6975 {
ea6a6627 6976 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 6977 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
6978 case BUILT_IN_FABSD32:
6979 case BUILT_IN_FABSD64:
6980 case BUILT_IN_FABSD128:
5039610b 6981 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 6982 if (target)
c22cacf3 6983 return target;
075ec276
RS
6984 break;
6985
ea6a6627 6986 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 6987 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 6988 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
6989 if (target)
6990 return target;
6991 break;
6992
5906d013
EC
6993 /* Just do a normal library call if we were unable to fold
6994 the values. */
ea6a6627 6995 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 6996 break;
28f4ec01 6997
1b1562a5 6998 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 6999 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
7000 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7001 if (target)
7002 return target;
7003 break;
7004
eaee4464
UB
7005 CASE_FLT_FN (BUILT_IN_ILOGB):
7006 if (! flag_unsafe_math_optimizations)
7007 break;
903c723b
TC
7008 gcc_fallthrough ();
7009 CASE_FLT_FN (BUILT_IN_ISINF):
7010 CASE_FLT_FN (BUILT_IN_FINITE):
7011 case BUILT_IN_ISFINITE:
7012 case BUILT_IN_ISNORMAL:
4359dc2a 7013 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
7014 if (target)
7015 return target;
7016 break;
7017
6c32ee74 7018 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7019 CASE_FLT_FN (BUILT_IN_LCEIL):
7020 CASE_FLT_FN (BUILT_IN_LLCEIL):
7021 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 7022 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 7023 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 7024 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
7025 if (target)
7026 return target;
7027 break;
7028
6c32ee74 7029 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
7030 CASE_FLT_FN (BUILT_IN_LRINT):
7031 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 7032 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
7033 CASE_FLT_FN (BUILT_IN_LROUND):
7034 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 7035 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
7036 if (target)
7037 return target;
7038 break;
7039
ea6a6627 7040 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 7041 target = expand_builtin_powi (exp, target);
17684d46
RG
7042 if (target)
7043 return target;
7044 break;
7045
75c7c595 7046 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 7047 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
7048 gcc_assert (target);
7049 return target;
7050
ea6a6627
VR
7051 CASE_FLT_FN (BUILT_IN_SIN):
7052 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
7053 if (! flag_unsafe_math_optimizations)
7054 break;
7055 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7056 if (target)
7057 return target;
7058 break;
7059
403e54f0
RG
7060 CASE_FLT_FN (BUILT_IN_SINCOS):
7061 if (! flag_unsafe_math_optimizations)
7062 break;
7063 target = expand_builtin_sincos (exp);
7064 if (target)
7065 return target;
7066 break;
7067
28f4ec01
BS
7068 case BUILT_IN_APPLY_ARGS:
7069 return expand_builtin_apply_args ();
7070
7071 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7072 FUNCTION with a copy of the parameters described by
7073 ARGUMENTS, and ARGSIZE. It returns a block of memory
7074 allocated on the stack into which is stored all the registers
7075 that might possibly be used for returning the result of a
7076 function. ARGUMENTS is the value returned by
7077 __builtin_apply_args. ARGSIZE is the number of bytes of
7078 arguments that must be copied. ??? How should this value be
7079 computed? We'll also need a safe worst case value for varargs
7080 functions. */
7081 case BUILT_IN_APPLY:
5039610b 7082 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 7083 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 7084 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 7085 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
7086 return const0_rtx;
7087 else
7088 {
28f4ec01
BS
7089 rtx ops[3];
7090
5039610b
SL
7091 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7092 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7093 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
7094
7095 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7096 }
7097
7098 /* __builtin_return (RESULT) causes the function to return the
7099 value described by RESULT. RESULT is address of the block of
7100 memory returned by __builtin_apply. */
7101 case BUILT_IN_RETURN:
5039610b
SL
7102 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7103 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
7104 return const0_rtx;
7105
7106 case BUILT_IN_SAVEREGS:
d3707adb 7107 return expand_builtin_saveregs ();
28f4ec01 7108
6ef5231b
JJ
7109 case BUILT_IN_VA_ARG_PACK:
7110 /* All valid uses of __builtin_va_arg_pack () are removed during
7111 inlining. */
c94ed7a1 7112 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
7113 return const0_rtx;
7114
ab0e176c
JJ
7115 case BUILT_IN_VA_ARG_PACK_LEN:
7116 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7117 inlining. */
c94ed7a1 7118 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
7119 return const0_rtx;
7120
28f4ec01
BS
7121 /* Return the address of the first anonymous stack arg. */
7122 case BUILT_IN_NEXT_ARG:
5039610b 7123 if (fold_builtin_next_arg (exp, false))
c22cacf3 7124 return const0_rtx;
8870e212 7125 return expand_builtin_next_arg ();
28f4ec01 7126
677feb77
DD
7127 case BUILT_IN_CLEAR_CACHE:
7128 target = expand_builtin___clear_cache (exp);
7129 if (target)
7130 return target;
7131 break;
7132
28f4ec01 7133 case BUILT_IN_CLASSIFY_TYPE:
5039610b 7134 return expand_builtin_classify_type (exp);
28f4ec01
BS
7135
7136 case BUILT_IN_CONSTANT_P:
6de9cd9a 7137 return const0_rtx;
28f4ec01
BS
7138
7139 case BUILT_IN_FRAME_ADDRESS:
7140 case BUILT_IN_RETURN_ADDRESS:
5039610b 7141 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
7142
7143 /* Returns the address of the area where the structure is returned.
7144 0 otherwise. */
7145 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 7146 if (call_expr_nargs (exp) != 0
ca7fd9cd 7147 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 7148 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 7149 return const0_rtx;
28f4ec01 7150 else
ca7fd9cd 7151 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 7152
9e878cf1 7153 CASE_BUILT_IN_ALLOCA:
b7e52782 7154 target = expand_builtin_alloca (exp);
28f4ec01
BS
7155 if (target)
7156 return target;
7157 break;
7158
e3174bdf
MO
7159 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7160 return expand_asan_emit_allocas_unpoison (exp);
7161
6de9cd9a
DN
7162 case BUILT_IN_STACK_SAVE:
7163 return expand_stack_save ();
7164
7165 case BUILT_IN_STACK_RESTORE:
5039610b 7166 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
7167 return const0_rtx;
7168
ac868f29 7169 case BUILT_IN_BSWAP16:
167fa32c
EC
7170 case BUILT_IN_BSWAP32:
7171 case BUILT_IN_BSWAP64:
ac868f29 7172 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
7173 if (target)
7174 return target;
7175 break;
7176
ea6a6627 7177 CASE_INT_FN (BUILT_IN_FFS):
5039610b 7178 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7179 subtarget, ffs_optab);
2928cd7a
RH
7180 if (target)
7181 return target;
7182 break;
7183
ea6a6627 7184 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 7185 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7186 subtarget, clz_optab);
2928cd7a
RH
7187 if (target)
7188 return target;
7189 break;
7190
ea6a6627 7191 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 7192 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7193 subtarget, ctz_optab);
2928cd7a
RH
7194 if (target)
7195 return target;
7196 break;
7197
3801c801 7198 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
7199 target = expand_builtin_unop (target_mode, exp, target,
7200 subtarget, clrsb_optab);
7201 if (target)
7202 return target;
7203 break;
7204
ea6a6627 7205 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 7206 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7207 subtarget, popcount_optab);
2928cd7a
RH
7208 if (target)
7209 return target;
7210 break;
7211
ea6a6627 7212 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 7213 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7214 subtarget, parity_optab);
28f4ec01
BS
7215 if (target)
7216 return target;
7217 break;
7218
7219 case BUILT_IN_STRLEN:
5039610b 7220 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
7221 if (target)
7222 return target;
7223 break;
7224
781ff3d8
MS
7225 case BUILT_IN_STRNLEN:
7226 target = expand_builtin_strnlen (exp, target, target_mode);
7227 if (target)
7228 return target;
7229 break;
7230
ee92e7ba
MS
7231 case BUILT_IN_STRCAT:
7232 target = expand_builtin_strcat (exp, target);
7233 if (target)
7234 return target;
7235 break;
7236
28f4ec01 7237 case BUILT_IN_STRCPY:
44e10129 7238 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
7239 if (target)
7240 return target;
7241 break;
8d51ecf8 7242
ee92e7ba
MS
7243 case BUILT_IN_STRNCAT:
7244 target = expand_builtin_strncat (exp, target);
7245 if (target)
7246 return target;
7247 break;
7248
da9e9f08 7249 case BUILT_IN_STRNCPY:
44e10129 7250 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
7251 if (target)
7252 return target;
7253 break;
8d51ecf8 7254
9cb65f92 7255 case BUILT_IN_STPCPY:
609ae0e2 7256 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
7257 if (target)
7258 return target;
7259 break;
7260
e50d56a5
MS
7261 case BUILT_IN_STPNCPY:
7262 target = expand_builtin_stpncpy (exp, target);
7263 if (target)
7264 return target;
7265 break;
7266
d9c5a8b9
MS
7267 case BUILT_IN_MEMCHR:
7268 target = expand_builtin_memchr (exp, target);
7269 if (target)
7270 return target;
7271 break;
7272
28f4ec01 7273 case BUILT_IN_MEMCPY:
44e10129 7274 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
7275 if (target)
7276 return target;
7277 break;
7278
e50d56a5
MS
7279 case BUILT_IN_MEMMOVE:
7280 target = expand_builtin_memmove (exp, target);
7281 if (target)
7282 return target;
7283 break;
7284
9cb65f92 7285 case BUILT_IN_MEMPCPY:
671a00ee 7286 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
7287 if (target)
7288 return target;
7289 break;
7290
7291 case BUILT_IN_MEMSET:
5039610b 7292 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
7293 if (target)
7294 return target;
7295 break;
7296
e3a709be 7297 case BUILT_IN_BZERO:
8148fe65 7298 target = expand_builtin_bzero (exp);
e3a709be
KG
7299 if (target)
7300 return target;
7301 break;
7302
10a0e2a9 7303 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
7304 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7305 when changing it to a strcmp call. */
7306 case BUILT_IN_STRCMP_EQ:
7307 target = expand_builtin_memcmp (exp, target, true);
7308 if (target)
7309 return target;
7310
7311 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 7312 TREE_OPERAND (exp, 1)
8b0b334a
QZ
7313 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7314
7315 /* Delete the last parameter. */
7316 unsigned int i;
7317 vec<tree, va_gc> *arg_vec;
7318 vec_alloc (arg_vec, 2);
7319 for (i = 0; i < 2; i++)
7320 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7321 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7322 /* FALLTHROUGH */
7323
28f4ec01 7324 case BUILT_IN_STRCMP:
44e10129 7325 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
7326 if (target)
7327 return target;
7328 break;
7329
8b0b334a
QZ
7330 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7331 back to a BUILT_IN_STRNCMP. */
7332 case BUILT_IN_STRNCMP_EQ:
7333 target = expand_builtin_memcmp (exp, target, true);
7334 if (target)
7335 return target;
7336
7337 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 7338 TREE_OPERAND (exp, 1)
8b0b334a
QZ
7339 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7340 /* FALLTHROUGH */
7341
da9e9f08
KG
7342 case BUILT_IN_STRNCMP:
7343 target = expand_builtin_strncmp (exp, target, mode);
7344 if (target)
7345 return target;
7346 break;
7347
4b2a62db 7348 case BUILT_IN_BCMP:
28f4ec01 7349 case BUILT_IN_MEMCMP:
36b85e43
BS
7350 case BUILT_IN_MEMCMP_EQ:
7351 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
7352 if (target)
7353 return target;
36b85e43
BS
7354 if (fcode == BUILT_IN_MEMCMP_EQ)
7355 {
7356 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7357 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7358 }
28f4ec01 7359 break;
28f4ec01
BS
7360
7361 case BUILT_IN_SETJMP:
903c723b 7362 /* This should have been lowered to the builtins below. */
4f6c2131
EB
7363 gcc_unreachable ();
7364
7365 case BUILT_IN_SETJMP_SETUP:
7366 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7367 and the receiver label. */
5039610b 7368 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 7369 {
5039610b 7370 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 7371 VOIDmode, EXPAND_NORMAL);
5039610b 7372 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 7373 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7374
7375 /* This is copied from the handling of non-local gotos. */
7376 expand_builtin_setjmp_setup (buf_addr, label_r);
7377 nonlocal_goto_handler_labels
b5241a5a 7378 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
7379 nonlocal_goto_handler_labels);
7380 /* ??? Do not let expand_label treat us as such since we would
7381 not want to be both on the list of non-local labels and on
7382 the list of forced labels. */
7383 FORCED_LABEL (label) = 0;
7384 return const0_rtx;
7385 }
7386 break;
7387
4f6c2131
EB
7388 case BUILT_IN_SETJMP_RECEIVER:
7389 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 7390 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 7391 {
5039610b 7392 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 7393 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7394
7395 expand_builtin_setjmp_receiver (label_r);
7396 return const0_rtx;
7397 }
250d07b6 7398 break;
28f4ec01
BS
7399
7400 /* __builtin_longjmp is passed a pointer to an array of five words.
7401 It's similar to the C library longjmp function but works with
7402 __builtin_setjmp above. */
7403 case BUILT_IN_LONGJMP:
5039610b 7404 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 7405 {
5039610b 7406 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 7407 VOIDmode, EXPAND_NORMAL);
5039610b 7408 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
7409
7410 if (value != const1_rtx)
7411 {
9e637a26 7412 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
7413 return const0_rtx;
7414 }
7415
7416 expand_builtin_longjmp (buf_addr, value);
7417 return const0_rtx;
7418 }
4f6c2131 7419 break;
28f4ec01 7420
6de9cd9a 7421 case BUILT_IN_NONLOCAL_GOTO:
5039610b 7422 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
7423 if (target)
7424 return target;
7425 break;
7426
2b92e7f5
RK
7427 /* This updates the setjmp buffer that is its argument with the value
7428 of the current stack pointer. */
7429 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 7430 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
7431 {
7432 rtx buf_addr
5039610b 7433 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
7434
7435 expand_builtin_update_setjmp_buf (buf_addr);
7436 return const0_rtx;
7437 }
7438 break;
7439
28f4ec01 7440 case BUILT_IN_TRAP:
9602f5a0 7441 expand_builtin_trap ();
28f4ec01
BS
7442 return const0_rtx;
7443
468059bc
DD
7444 case BUILT_IN_UNREACHABLE:
7445 expand_builtin_unreachable ();
7446 return const0_rtx;
7447
ea6a6627 7448 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
7449 case BUILT_IN_SIGNBITD32:
7450 case BUILT_IN_SIGNBITD64:
7451 case BUILT_IN_SIGNBITD128:
ef79730c
RS
7452 target = expand_builtin_signbit (exp, target);
7453 if (target)
7454 return target;
7455 break;
7456
28f4ec01
BS
7457 /* Various hooks for the DWARF 2 __throw routine. */
7458 case BUILT_IN_UNWIND_INIT:
7459 expand_builtin_unwind_init ();
7460 return const0_rtx;
7461 case BUILT_IN_DWARF_CFA:
7462 return virtual_cfa_rtx;
7463#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
7464 case BUILT_IN_DWARF_SP_COLUMN:
7465 return expand_builtin_dwarf_sp_column ();
d9d5c9de 7466 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 7467 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 7468 return const0_rtx;
28f4ec01
BS
7469#endif
7470 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 7471 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7472 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 7473 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7474 case BUILT_IN_EH_RETURN:
5039610b
SL
7475 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7476 CALL_EXPR_ARG (exp, 1));
28f4ec01 7477 return const0_rtx;
52a11cbf 7478 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 7479 return expand_builtin_eh_return_data_regno (exp);
c76362b4 7480 case BUILT_IN_EXTEND_POINTER:
5039610b 7481 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
7482 case BUILT_IN_EH_POINTER:
7483 return expand_builtin_eh_pointer (exp);
7484 case BUILT_IN_EH_FILTER:
7485 return expand_builtin_eh_filter (exp);
7486 case BUILT_IN_EH_COPY_VALUES:
7487 return expand_builtin_eh_copy_values (exp);
c76362b4 7488
6c535c69 7489 case BUILT_IN_VA_START:
5039610b 7490 return expand_builtin_va_start (exp);
d3707adb 7491 case BUILT_IN_VA_END:
5039610b 7492 return expand_builtin_va_end (exp);
d3707adb 7493 case BUILT_IN_VA_COPY:
5039610b 7494 return expand_builtin_va_copy (exp);
994a57cd 7495 case BUILT_IN_EXPECT:
5039610b 7496 return expand_builtin_expect (exp, target);
45d439ac
JJ
7497 case BUILT_IN_ASSUME_ALIGNED:
7498 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 7499 case BUILT_IN_PREFETCH:
5039610b 7500 expand_builtin_prefetch (exp);
a9ccbb60
JJ
7501 return const0_rtx;
7502
6de9cd9a 7503 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
7504 return expand_builtin_init_trampoline (exp, true);
7505 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7506 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 7507 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 7508 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 7509
4c640e26
EB
7510 case BUILT_IN_INIT_DESCRIPTOR:
7511 return expand_builtin_init_descriptor (exp);
7512 case BUILT_IN_ADJUST_DESCRIPTOR:
7513 return expand_builtin_adjust_descriptor (exp);
7514
d1c38823
ZD
7515 case BUILT_IN_FORK:
7516 case BUILT_IN_EXECL:
7517 case BUILT_IN_EXECV:
7518 case BUILT_IN_EXECLP:
7519 case BUILT_IN_EXECLE:
7520 case BUILT_IN_EXECVP:
7521 case BUILT_IN_EXECVE:
5039610b 7522 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
7523 if (target)
7524 return target;
7525 break;
28f4ec01 7526
e0a8ecf2
AM
7527 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7528 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7529 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7530 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7531 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 7533 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
7534 if (target)
7535 return target;
7536 break;
7537
e0a8ecf2
AM
7538 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7539 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7540 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7541 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7542 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 7544 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
7545 if (target)
7546 return target;
7547 break;
7548
e0a8ecf2
AM
7549 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7550 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7551 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7552 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7553 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 7555 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
7556 if (target)
7557 return target;
7558 break;
7559
e0a8ecf2
AM
7560 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7561 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7562 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7563 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7564 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 7566 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
7567 if (target)
7568 return target;
7569 break;
7570
e0a8ecf2
AM
7571 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7572 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7573 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7574 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7575 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 7577 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
7578 if (target)
7579 return target;
7580 break;
7581
e0a8ecf2
AM
7582 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7583 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7584 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7585 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7586 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7587 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 7588 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
7589 if (target)
7590 return target;
7591 break;
7592
e0a8ecf2
AM
7593 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7594 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7595 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7596 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7597 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 7599 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
7600 if (target)
7601 return target;
7602 break;
7603
e0a8ecf2
AM
7604 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7605 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7606 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7607 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7608 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 7610 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
7611 if (target)
7612 return target;
7613 break;
7614
e0a8ecf2
AM
7615 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7616 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7617 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7618 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7619 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 7621 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
7622 if (target)
7623 return target;
7624 break;
7625
e0a8ecf2
AM
7626 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7627 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7628 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7629 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7630 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7631 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 7632 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
7633 if (target)
7634 return target;
7635 break;
7636
e0a8ecf2
AM
7637 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7638 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7639 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7640 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7641 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7642 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 7643 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
7644 if (target)
7645 return target;
7646 break;
7647
e0a8ecf2
AM
7648 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7649 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7650 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7651 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7652 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7653 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 7654 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
7655 if (target)
7656 return target;
7657 break;
7658
e0a8ecf2
AM
7659 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7660 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7661 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7662 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7663 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
7664 if (mode == VOIDmode)
7665 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
7666 if (!target || !register_operand (target, mode))
7667 target = gen_reg_rtx (mode);
02ee605c 7668
e0a8ecf2
AM
7669 mode = get_builtin_sync_mode
7670 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 7671 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
7672 if (target)
7673 return target;
7674 break;
7675
e0a8ecf2
AM
7676 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7677 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7678 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7679 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7680 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7681 mode = get_builtin_sync_mode
7682 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 7683 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
7684 if (target)
7685 return target;
7686 break;
7687
e0a8ecf2
AM
7688 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7689 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7690 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7691 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7692 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7693 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7694 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
7695 if (target)
7696 return target;
7697 break;
7698
e0a8ecf2
AM
7699 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7700 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7701 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7702 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7703 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7704 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7705 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
7706 return const0_rtx;
7707
e0a8ecf2
AM
7708 case BUILT_IN_SYNC_SYNCHRONIZE:
7709 expand_builtin_sync_synchronize ();
48ae6c13
RH
7710 return const0_rtx;
7711
86951993
AM
7712 case BUILT_IN_ATOMIC_EXCHANGE_1:
7713 case BUILT_IN_ATOMIC_EXCHANGE_2:
7714 case BUILT_IN_ATOMIC_EXCHANGE_4:
7715 case BUILT_IN_ATOMIC_EXCHANGE_8:
7716 case BUILT_IN_ATOMIC_EXCHANGE_16:
7717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7718 target = expand_builtin_atomic_exchange (mode, exp, target);
7719 if (target)
7720 return target;
7721 break;
7722
7723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7724 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7725 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
7728 {
7729 unsigned int nargs, z;
9771b263 7730 vec<tree, va_gc> *vec;
e351ae85
AM
7731
7732 mode =
7733 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7734 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7735 if (target)
7736 return target;
7737
7738 /* If this is turned into an external library call, the weak parameter
7739 must be dropped to match the expected parameter list. */
7740 nargs = call_expr_nargs (exp);
9771b263 7741 vec_alloc (vec, nargs - 1);
e351ae85 7742 for (z = 0; z < 3; z++)
9771b263 7743 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7744 /* Skip the boolean weak parameter. */
7745 for (z = 4; z < 6; z++)
9771b263 7746 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7747 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7748 break;
7749 }
86951993
AM
7750
7751 case BUILT_IN_ATOMIC_LOAD_1:
7752 case BUILT_IN_ATOMIC_LOAD_2:
7753 case BUILT_IN_ATOMIC_LOAD_4:
7754 case BUILT_IN_ATOMIC_LOAD_8:
7755 case BUILT_IN_ATOMIC_LOAD_16:
7756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7757 target = expand_builtin_atomic_load (mode, exp, target);
7758 if (target)
7759 return target;
7760 break;
7761
7762 case BUILT_IN_ATOMIC_STORE_1:
7763 case BUILT_IN_ATOMIC_STORE_2:
7764 case BUILT_IN_ATOMIC_STORE_4:
7765 case BUILT_IN_ATOMIC_STORE_8:
7766 case BUILT_IN_ATOMIC_STORE_16:
7767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7768 target = expand_builtin_atomic_store (mode, exp);
7769 if (target)
7770 return const0_rtx;
7771 break;
7772
7773 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7774 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7775 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7776 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7777 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7778 {
7779 enum built_in_function lib;
7780 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7781 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7782 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7783 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7784 ignore, lib);
7785 if (target)
7786 return target;
7787 break;
7788 }
7789 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7790 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7791 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7792 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7793 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7794 {
7795 enum built_in_function lib;
7796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7797 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7798 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7799 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7800 ignore, lib);
7801 if (target)
7802 return target;
7803 break;
7804 }
7805 case BUILT_IN_ATOMIC_AND_FETCH_1:
7806 case BUILT_IN_ATOMIC_AND_FETCH_2:
7807 case BUILT_IN_ATOMIC_AND_FETCH_4:
7808 case BUILT_IN_ATOMIC_AND_FETCH_8:
7809 case BUILT_IN_ATOMIC_AND_FETCH_16:
7810 {
7811 enum built_in_function lib;
7812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7813 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7814 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7815 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7816 ignore, lib);
7817 if (target)
7818 return target;
7819 break;
7820 }
7821 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7822 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7823 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7824 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7825 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7826 {
7827 enum built_in_function lib;
7828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7829 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7830 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7831 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7832 ignore, lib);
7833 if (target)
7834 return target;
7835 break;
7836 }
7837 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7838 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7839 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7840 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7841 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7842 {
7843 enum built_in_function lib;
7844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7845 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7846 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7847 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7848 ignore, lib);
7849 if (target)
7850 return target;
7851 break;
7852 }
7853 case BUILT_IN_ATOMIC_OR_FETCH_1:
7854 case BUILT_IN_ATOMIC_OR_FETCH_2:
7855 case BUILT_IN_ATOMIC_OR_FETCH_4:
7856 case BUILT_IN_ATOMIC_OR_FETCH_8:
7857 case BUILT_IN_ATOMIC_OR_FETCH_16:
7858 {
7859 enum built_in_function lib;
7860 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7861 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7862 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7863 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7864 ignore, lib);
7865 if (target)
7866 return target;
7867 break;
7868 }
7869 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7870 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7871 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7872 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7873 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7875 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7876 ignore, BUILT_IN_NONE);
7877 if (target)
7878 return target;
7879 break;
7880
7881 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7882 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7883 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7884 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7885 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7887 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7888 ignore, BUILT_IN_NONE);
7889 if (target)
7890 return target;
7891 break;
7892
7893 case BUILT_IN_ATOMIC_FETCH_AND_1:
7894 case BUILT_IN_ATOMIC_FETCH_AND_2:
7895 case BUILT_IN_ATOMIC_FETCH_AND_4:
7896 case BUILT_IN_ATOMIC_FETCH_AND_8:
7897 case BUILT_IN_ATOMIC_FETCH_AND_16:
7898 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7899 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7900 ignore, BUILT_IN_NONE);
7901 if (target)
7902 return target;
7903 break;
7904
7905 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7906 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7907 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7908 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7909 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7910 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7911 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7912 ignore, BUILT_IN_NONE);
7913 if (target)
7914 return target;
7915 break;
7916
7917 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7918 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7919 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7920 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7921 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7922 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7923 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7924 ignore, BUILT_IN_NONE);
7925 if (target)
7926 return target;
7927 break;
7928
7929 case BUILT_IN_ATOMIC_FETCH_OR_1:
7930 case BUILT_IN_ATOMIC_FETCH_OR_2:
7931 case BUILT_IN_ATOMIC_FETCH_OR_4:
7932 case BUILT_IN_ATOMIC_FETCH_OR_8:
7933 case BUILT_IN_ATOMIC_FETCH_OR_16:
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7935 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7936 ignore, BUILT_IN_NONE);
7937 if (target)
7938 return target;
7939 break;
d660c35e
AM
7940
7941 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 7942 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
7943
7944 case BUILT_IN_ATOMIC_CLEAR:
7945 return expand_builtin_atomic_clear (exp);
86951993
AM
7946
7947 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7948 return expand_builtin_atomic_always_lock_free (exp);
7949
7950 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7951 target = expand_builtin_atomic_is_lock_free (exp);
7952 if (target)
7953 return target;
7954 break;
7955
7956 case BUILT_IN_ATOMIC_THREAD_FENCE:
7957 expand_builtin_atomic_thread_fence (exp);
7958 return const0_rtx;
7959
7960 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7961 expand_builtin_atomic_signal_fence (exp);
7962 return const0_rtx;
7963
10a0d495
JJ
7964 case BUILT_IN_OBJECT_SIZE:
7965 return expand_builtin_object_size (exp);
7966
7967 case BUILT_IN_MEMCPY_CHK:
7968 case BUILT_IN_MEMPCPY_CHK:
7969 case BUILT_IN_MEMMOVE_CHK:
7970 case BUILT_IN_MEMSET_CHK:
7971 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7972 if (target)
7973 return target;
7974 break;
7975
7976 case BUILT_IN_STRCPY_CHK:
7977 case BUILT_IN_STPCPY_CHK:
7978 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 7979 case BUILT_IN_STPNCPY_CHK:
10a0d495 7980 case BUILT_IN_STRCAT_CHK:
1c2fc017 7981 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
7982 case BUILT_IN_SNPRINTF_CHK:
7983 case BUILT_IN_VSNPRINTF_CHK:
7984 maybe_emit_chk_warning (exp, fcode);
7985 break;
7986
7987 case BUILT_IN_SPRINTF_CHK:
7988 case BUILT_IN_VSPRINTF_CHK:
7989 maybe_emit_sprintf_chk_warning (exp, fcode);
7990 break;
7991
f9555f40 7992 case BUILT_IN_FREE:
a3a704a4
MH
7993 if (warn_free_nonheap_object)
7994 maybe_emit_free_warning (exp);
f9555f40
JJ
7995 break;
7996
f959607b
CLT
7997 case BUILT_IN_THREAD_POINTER:
7998 return expand_builtin_thread_pointer (exp, target);
7999
8000 case BUILT_IN_SET_THREAD_POINTER:
8001 expand_builtin_set_thread_pointer (exp);
8002 return const0_rtx;
8003
41dbbb37 8004 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
8005 /* Do library call, if we failed to expand the builtin when
8006 folding. */
41dbbb37
TS
8007 break;
8008
1f62d637
TV
8009 case BUILT_IN_GOACC_PARLEVEL_ID:
8010 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8011 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8012
e62f4abc 8013 default: /* just do library call, if unknown builtin */
84b8b0e0 8014 break;
28f4ec01
BS
8015 }
8016
8017 /* The switch statement above can drop through to cause the function
8018 to be called normally. */
8019 return expand_call (exp, target, ignore);
8020}
b0b3afb2 8021
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  /* Only normal (non-machine-dependent) built-in function decls
     qualify; anything else cannot map to a math function code.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lockstep, checking that each argument's type class matches the
     corresponding parameter's type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Extra actual arguments beyond the declared list mean the
	     call doesn't match the built-in's prototype.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Argument and parameter must agree on broad type class:
	 real, complex real, pointer, or integral.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
8092
5039610b
SL
8093/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8094 evaluate to a constant. */
b0b3afb2
BS
8095
8096static tree
5039610b 8097fold_builtin_constant_p (tree arg)
b0b3afb2 8098{
b0b3afb2
BS
8099 /* We return 1 for a numeric type that's known to be a constant
8100 value at compile-time or for an aggregate type that's a
8101 literal constant. */
5039610b 8102 STRIP_NOPS (arg);
b0b3afb2
BS
8103
8104 /* If we know this is a constant, emit the constant of one. */
5039610b
SL
8105 if (CONSTANT_CLASS_P (arg)
8106 || (TREE_CODE (arg) == CONSTRUCTOR
8107 && TREE_CONSTANT (arg)))
b0b3afb2 8108 return integer_one_node;
5039610b 8109 if (TREE_CODE (arg) == ADDR_EXPR)
fb664a2c 8110 {
5039610b 8111 tree op = TREE_OPERAND (arg, 0);
fb664a2c
RG
8112 if (TREE_CODE (op) == STRING_CST
8113 || (TREE_CODE (op) == ARRAY_REF
8114 && integer_zerop (TREE_OPERAND (op, 1))
8115 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8116 return integer_one_node;
8117 }
b0b3afb2 8118
0dcd3840
RH
8119 /* If this expression has side effects, show we don't know it to be a
8120 constant. Likewise if it's a pointer or aggregate type since in
8121 those case we only want literals, since those are only optimized
13104975
ZW
8122 when generating RTL, not later.
8123 And finally, if we are compiling an initializer, not code, we
8124 need to return a definite result now; there's not going to be any
8125 more optimization done. */
5039610b
SL
8126 if (TREE_SIDE_EFFECTS (arg)
8127 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8128 || POINTER_TYPE_P (TREE_TYPE (arg))
63b48197 8129 || cfun == 0
4e7d7b3d
JJ
8130 || folding_initializer
8131 || force_folding_builtin_constant_p)
b0b3afb2
BS
8132 return integer_zero_node;
8133
5039610b 8134 return NULL_TREE;
b0b3afb2
BS
8135}
8136
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  PREDICTOR, when non-NULL, is passed as a
   third argument carrying the predictor hint.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* Pull the parameter and return types straight from the
     __builtin_expect declaration so the call is built type-correct.  */
  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  /* Only pass three arguments when a predictor hint is present.  */
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  /* Turn the long-valued call back into a truthvalue by comparing
     against zero.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  ARG2 is the optional
   predictor argument and is forwarded when predicates are rebuilt.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Push the expectation into both operands of && / ||.  ARG1 is
	 wrapped in save_expr because it is used twice.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Walk down to the base object of the address expression; a
	 weak symbol's address is not a compile-time constant.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
8232
5039610b 8233/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 8234
ad82abb8 8235static tree
5039610b 8236fold_builtin_classify_type (tree arg)
ad82abb8 8237{
5039610b 8238 if (arg == 0)
45a2c477 8239 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 8240
45a2c477 8241 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
8242}
8243
5039610b 8244/* Fold a call to __builtin_strlen with argument ARG. */
667bbbbb
EC
8245
8246static tree
ab996409 8247fold_builtin_strlen (location_t loc, tree type, tree arg)
667bbbbb 8248{
5039610b 8249 if (!validate_arg (arg, POINTER_TYPE))
667bbbbb
EC
8250 return NULL_TREE;
8251 else
8252 {
5039610b 8253 tree len = c_strlen (arg, 0);
667bbbbb
EC
8254
8255 if (len)
ab996409 8256 return fold_convert_loc (loc, type, len);
667bbbbb
EC
8257
8258 return NULL_TREE;
8259 }
8260}
8261
ab5e2615
RH
8262/* Fold a call to __builtin_inf or __builtin_huge_val. */
8263
8264static tree
db3927fb 8265fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 8266{
efdc7e19
RH
8267 REAL_VALUE_TYPE real;
8268
6d84156b
JM
8269 /* __builtin_inff is intended to be usable to define INFINITY on all
8270 targets. If an infinity is not available, INFINITY expands "to a
8271 positive constant of type float that overflows at translation
8272 time", footnote "In this case, using INFINITY will violate the
8273 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8274 Thus we pedwarn to ensure this constraint violation is
8275 diagnosed. */
ab5e2615 8276 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 8277 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 8278
efdc7e19
RH
8279 real_inf (&real);
8280 return build_real (type, real);
ab5e2615
RH
8281}
8282
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the angle, ARG1
   and ARG2 are the pointers receiving sin and cos respectively.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* No constant folding happened; emit a real call to cexpi, but
	 only if the C99 complex math library is available.  The call
	 result is saved because it is used twice below.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  /* Build (*arg1 = imag(call), *arg2 = real(call)): the imaginary part
     of cexpi is sin, the real part is cos.  */
  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
8332
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   LEN is the byte count.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      /* Both bytes are read through a const-qualified unsigned char
	 pointer, matching memcmp's unsigned comparison semantics.  */
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8378
5039610b 8379/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
8380
8381static tree
db3927fb 8382fold_builtin_isascii (location_t loc, tree arg)
df0785d6 8383{
5039610b
SL
8384 if (!validate_arg (arg, INTEGER_TYPE))
8385 return NULL_TREE;
df0785d6
KG
8386 else
8387 {
8388 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 8389 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8390 build_int_cst (integer_type_node,
6728ee79 8391 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 8392 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 8393 arg, integer_zero_node);
df0785d6
KG
8394 }
8395}
8396
5039610b 8397/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
8398
8399static tree
db3927fb 8400fold_builtin_toascii (location_t loc, tree arg)
df0785d6 8401{
5039610b
SL
8402 if (!validate_arg (arg, INTEGER_TYPE))
8403 return NULL_TREE;
b8698a0f 8404
5039610b 8405 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 8406 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8407 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
8408}
8409
5039610b 8410/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
8411
8412static tree
db3927fb 8413fold_builtin_isdigit (location_t loc, tree arg)
61218d19 8414{
5039610b
SL
8415 if (!validate_arg (arg, INTEGER_TYPE))
8416 return NULL_TREE;
61218d19
KG
8417 else
8418 {
8419 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
8420 /* According to the C standard, isdigit is unaffected by locale.
8421 However, it definitely is affected by the target character set. */
c5ff069d
ZW
8422 unsigned HOST_WIDE_INT target_digit0
8423 = lang_hooks.to_target_charset ('0');
8424
8425 if (target_digit0 == 0)
8426 return NULL_TREE;
8427
db3927fb 8428 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
8429 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8430 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 8431 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 8432 build_int_cst (unsigned_type_node, 9));
61218d19
KG
8433 }
8434}
ef79730c 8435
5039610b 8436/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
8437
8438static tree
db3927fb 8439fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 8440{
5039610b
SL
8441 if (!validate_arg (arg, REAL_TYPE))
8442 return NULL_TREE;
9655d83b 8443
db3927fb 8444 arg = fold_convert_loc (loc, type, arg);
db3927fb 8445 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8446}
8447
5039610b 8448/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
8449
8450static tree
db3927fb 8451fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 8452{
5039610b
SL
8453 if (!validate_arg (arg, INTEGER_TYPE))
8454 return NULL_TREE;
9655d83b 8455
db3927fb 8456 arg = fold_convert_loc (loc, type, arg);
db3927fb 8457 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8458}
8459
527cab20
KG
8460/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8461
8462static tree
db3927fb 8463fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 8464{
c128599a
KG
8465 if (validate_arg (arg, COMPLEX_TYPE)
8466 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
8467 {
8468 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 8469
527cab20
KG
8470 if (atan2_fn)
8471 {
5039610b 8472 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
8473 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8474 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8475 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
8476 }
8477 }
b8698a0f 8478
527cab20
KG
8479 return NULL_TREE;
8480}
8481
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the constant value to decompose, ARG1 the int pointer receiving the
   exponent, and RETTYPE the real type of the call.  Returns NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold a literal, non-overflowing real constant.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8537
3d577eaf
KG
8538/* Fold a call to builtin modf. */
8539
8540static tree
db3927fb 8541fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
8542{
8543 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8544 return NULL_TREE;
b8698a0f 8545
3d577eaf 8546 STRIP_NOPS (arg0);
b8698a0f 8547
3d577eaf
KG
8548 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8549 return NULL_TREE;
b8698a0f 8550
db3927fb 8551 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
8552
8553 /* Proceed if a valid pointer type was passed in. */
8554 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8555 {
8556 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8557 REAL_VALUE_TYPE trunc, frac;
8558
8559 switch (value->cl)
8560 {
8561 case rvc_nan:
8562 case rvc_zero:
8563 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8564 trunc = frac = *value;
8565 break;
8566 case rvc_inf:
8567 /* For +-Inf, return (*arg1 = arg0, +-0). */
8568 frac = dconst0;
8569 frac.sign = value->sign;
8570 trunc = *value;
8571 break;
8572 case rvc_normal:
8573 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8574 real_trunc (&trunc, VOIDmode, value);
8575 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8576 /* If the original number was negative and already
8577 integral, then the fractional part is -0.0. */
8578 if (value->sign && frac.cl == rvc_zero)
8579 frac.sign = value->sign;
8580 break;
8581 }
b8698a0f 8582
3d577eaf 8583 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8584 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
8585 build_real (rettype, trunc));
8586 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8587 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
8588 build_real (rettype, frac));
8589 }
b8698a0f 8590
3d577eaf
KG
8591 return NULL_TREE;
8592}
8593
903c723b
TC
8594/* Given a location LOC, an interclass builtin function decl FNDECL
8595 and its single argument ARG, return an folded expression computing
8596 the same, or NULL_TREE if we either couldn't or didn't want to fold
8597 (the latter happen if there's an RTL instruction available). */
8598
8599static tree
8600fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8601{
8602 machine_mode mode;
8603
8604 if (!validate_arg (arg, REAL_TYPE))
8605 return NULL_TREE;
8606
8607 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8608 return NULL_TREE;
8609
8610 mode = TYPE_MODE (TREE_TYPE (arg));
8611
8612 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
02cf2861 8613
903c723b
TC
8614 /* If there is no optab, try generic code. */
8615 switch (DECL_FUNCTION_CODE (fndecl))
8616 {
8617 tree result;
44e10129 8618
903c723b
TC
8619 CASE_FLT_FN (BUILT_IN_ISINF):
8620 {
8621 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8622 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8623 tree type = TREE_TYPE (arg);
8624 REAL_VALUE_TYPE r;
8625 char buf[128];
8626
8627 if (is_ibm_extended)
8628 {
8629 /* NaN and Inf are encoded in the high-order double value
8630 only. The low-order value is not significant. */
8631 type = double_type_node;
8632 mode = DFmode;
8633 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8634 }
8635 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8636 real_from_string (&r, buf);
8637 result = build_call_expr (isgr_fn, 2,
8638 fold_build1_loc (loc, ABS_EXPR, type, arg),
8639 build_real (type, r));
8640 return result;
8641 }
8642 CASE_FLT_FN (BUILT_IN_FINITE):
8643 case BUILT_IN_ISFINITE:
8644 {
8645 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8646 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8647 tree type = TREE_TYPE (arg);
8648 REAL_VALUE_TYPE r;
8649 char buf[128];
8650
8651 if (is_ibm_extended)
8652 {
8653 /* NaN and Inf are encoded in the high-order double value
8654 only. The low-order value is not significant. */
8655 type = double_type_node;
8656 mode = DFmode;
8657 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8658 }
8659 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8660 real_from_string (&r, buf);
8661 result = build_call_expr (isle_fn, 2,
8662 fold_build1_loc (loc, ABS_EXPR, type, arg),
8663 build_real (type, r));
8664 /*result = fold_build2_loc (loc, UNGT_EXPR,
8665 TREE_TYPE (TREE_TYPE (fndecl)),
8666 fold_build1_loc (loc, ABS_EXPR, type, arg),
8667 build_real (type, r));
8668 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8669 TREE_TYPE (TREE_TYPE (fndecl)),
8670 result);*/
8671 return result;
8672 }
8673 case BUILT_IN_ISNORMAL:
8674 {
8675 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8676 islessequal(fabs(x),DBL_MAX). */
8677 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8678 tree type = TREE_TYPE (arg);
8679 tree orig_arg, max_exp, min_exp;
8680 machine_mode orig_mode = mode;
8681 REAL_VALUE_TYPE rmax, rmin;
8682 char buf[128];
8683
8684 orig_arg = arg = builtin_save_expr (arg);
8685 if (is_ibm_extended)
8686 {
8687 /* Use double to test the normal range of IBM extended
8688 precision. Emin for IBM extended precision is
8689 different to emin for IEEE double, being 53 higher
8690 since the low double exponent is at least 53 lower
8691 than the high double exponent. */
8692 type = double_type_node;
8693 mode = DFmode;
8694 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8695 }
8696 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8697
8698 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8699 real_from_string (&rmax, buf);
8700 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8701 real_from_string (&rmin, buf);
8702 max_exp = build_real (type, rmax);
8703 min_exp = build_real (type, rmin);
8704
8705 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8706 if (is_ibm_extended)
8707 {
8708 /* Testing the high end of the range is done just using
8709 the high double, using the same test as isfinite().
8710 For the subnormal end of the range we first test the
8711 high double, then if its magnitude is equal to the
8712 limit of 0x1p-969, we test whether the low double is
8713 non-zero and opposite sign to the high double. */
8714 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8715 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8716 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8717 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8718 arg, min_exp);
8719 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8720 complex_double_type_node, orig_arg);
8721 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8722 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8723 tree zero = build_real (type, dconst0);
8724 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8725 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8726 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8727 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8728 fold_build3 (COND_EXPR,
8729 integer_type_node,
8730 hilt, logt, lolt));
8731 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8732 eq_min, ok_lo);
8733 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8734 gt_min, eq_min);
8735 }
8736 else
8737 {
8738 tree const isge_fn
8739 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8740 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8741 }
8742 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8743 max_exp, min_exp);
8744 return result;
8745 }
8746 default:
8747 break;
8748 }
8749
8750 return NULL_TREE;
8751}
8752
8753/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
5039610b 8754 ARG is the argument for the call. */
64a9295a
PB
8755
8756static tree
903c723b 8757fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
64a9295a 8758{
903c723b
TC
8759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8760
5039610b 8761 if (!validate_arg (arg, REAL_TYPE))
83322951 8762 return NULL_TREE;
64a9295a 8763
64a9295a
PB
8764 switch (builtin_index)
8765 {
903c723b
TC
8766 case BUILT_IN_ISINF:
8767 if (!HONOR_INFINITIES (arg))
8768 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8769
8770 return NULL_TREE;
8771
05f41289
KG
8772 case BUILT_IN_ISINF_SIGN:
8773 {
8774 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8775 /* In a boolean context, GCC will fold the inner COND_EXPR to
8776 1. So e.g. "if (isinf_sign(x))" would be folded to just
8777 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
72f52f30 8778 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
e79983f4 8779 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
05f41289
KG
8780 tree tmp = NULL_TREE;
8781
8782 arg = builtin_save_expr (arg);
8783
8784 if (signbit_fn && isinf_fn)
8785 {
db3927fb
AH
8786 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8787 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
05f41289 8788
db3927fb 8789 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8790 signbit_call, integer_zero_node);
db3927fb 8791 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8792 isinf_call, integer_zero_node);
b8698a0f 8793
db3927fb 8794 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
05f41289 8795 integer_minus_one_node, integer_one_node);
db3927fb
AH
8796 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8797 isinf_call, tmp,
05f41289
KG
8798 integer_zero_node);
8799 }
8800
8801 return tmp;
8802 }
8803
903c723b
TC
8804 case BUILT_IN_ISFINITE:
8805 if (!HONOR_NANS (arg)
8806 && !HONOR_INFINITIES (arg))
8807 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8808
8809 return NULL_TREE;
8810
8811 case BUILT_IN_ISNAN:
8812 if (!HONOR_NANS (arg))
8813 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8814
8815 {
8816 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8817 if (is_ibm_extended)
8818 {
8819 /* NaN and Inf are encoded in the high-order double value
8820 only. The low-order value is not significant. */
8821 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8822 }
8823 }
8824 arg = builtin_save_expr (arg);
8825 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8826
64a9295a 8827 default:
298e6adc 8828 gcc_unreachable ();
64a9295a
PB
8829 }
8830}
8831
903c723b
TC
8832/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8833 This builtin will generate code to return the appropriate floating
8834 point classification depending on the value of the floating point
8835 number passed in. The possible return values must be supplied as
8836 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8837 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8838 one floating point argument which is "type generic". */
8839
8840static tree
8841fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8842{
8843 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8844 arg, type, res, tmp;
8845 machine_mode mode;
8846 REAL_VALUE_TYPE r;
8847 char buf[128];
8848
8849 /* Verify the required arguments in the original call. */
8850 if (nargs != 6
8851 || !validate_arg (args[0], INTEGER_TYPE)
8852 || !validate_arg (args[1], INTEGER_TYPE)
8853 || !validate_arg (args[2], INTEGER_TYPE)
8854 || !validate_arg (args[3], INTEGER_TYPE)
8855 || !validate_arg (args[4], INTEGER_TYPE)
8856 || !validate_arg (args[5], REAL_TYPE))
8857 return NULL_TREE;
8858
8859 fp_nan = args[0];
8860 fp_infinite = args[1];
8861 fp_normal = args[2];
8862 fp_subnormal = args[3];
8863 fp_zero = args[4];
8864 arg = args[5];
8865 type = TREE_TYPE (arg);
8866 mode = TYPE_MODE (type);
8867 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8868
8869 /* fpclassify(x) ->
8870 isnan(x) ? FP_NAN :
8871 (fabs(x) == Inf ? FP_INFINITE :
8872 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8873 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8874
8875 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8876 build_real (type, dconst0));
8877 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8878 tmp, fp_zero, fp_subnormal);
8879
8880 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8881 real_from_string (&r, buf);
8882 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8883 arg, build_real (type, r));
8884 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8885
8886 if (HONOR_INFINITIES (mode))
8887 {
8888 real_inf (&r);
8889 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8890 build_real (type, r));
8891 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8892 fp_infinite, res);
8893 }
8894
8895 if (HONOR_NANS (mode))
8896 {
8897 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8898 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8899 }
8900
8901 return res;
8902}
8903
08039bd8 8904/* Fold a call to an unordered comparison function such as
a35da91f 8905 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
5039610b 8906 being called and ARG0 and ARG1 are the arguments for the call.
64a9295a
PB
8907 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8908 the opposite of the desired result. UNORDERED_CODE is used
8909 for modes that can hold NaNs and ORDERED_CODE is used for
8910 the rest. */
08039bd8
RS
8911
8912static tree
db3927fb 8913fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
08039bd8
RS
8914 enum tree_code unordered_code,
8915 enum tree_code ordered_code)
8916{
14f661f1 8917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
08039bd8 8918 enum tree_code code;
1aeaea8d
GK
8919 tree type0, type1;
8920 enum tree_code code0, code1;
8921 tree cmp_type = NULL_TREE;
08039bd8 8922
1aeaea8d
GK
8923 type0 = TREE_TYPE (arg0);
8924 type1 = TREE_TYPE (arg1);
c22cacf3 8925
1aeaea8d
GK
8926 code0 = TREE_CODE (type0);
8927 code1 = TREE_CODE (type1);
c22cacf3 8928
1aeaea8d
GK
8929 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8930 /* Choose the wider of two real types. */
8931 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8932 ? type0 : type1;
8933 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8934 cmp_type = type0;
8935 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8936 cmp_type = type1;
c22cacf3 8937
db3927fb
AH
8938 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8939 arg1 = fold_convert_loc (loc, cmp_type, arg1);
14f661f1
RS
8940
8941 if (unordered_code == UNORDERED_EXPR)
8942 {
1b457aa4 8943 if (!HONOR_NANS (arg0))
db3927fb
AH
8944 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8945 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
14f661f1 8946 }
08039bd8 8947
1b457aa4 8948 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
db3927fb
AH
8949 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8950 fold_build2_loc (loc, code, type, arg0, arg1));
08039bd8
RS
8951}
8952
1304953e
JJ
8953/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8954 arithmetics if it can never overflow, or into internal functions that
8955 return both result of arithmetics and overflowed boolean flag in
44a845ca
MS
8956 a complex integer result, or some other check for overflow.
8957 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8958 checking part of that. */
1304953e
JJ
8959
8960static tree
8961fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8962 tree arg0, tree arg1, tree arg2)
8963{
8964 enum internal_fn ifn = IFN_LAST;
44a845ca
MS
8965 /* The code of the expression corresponding to the type-generic
8966 built-in, or ERROR_MARK for the type-specific ones. */
8967 enum tree_code opcode = ERROR_MARK;
8968 bool ovf_only = false;
8969
1304953e
JJ
8970 switch (fcode)
8971 {
44a845ca
MS
8972 case BUILT_IN_ADD_OVERFLOW_P:
8973 ovf_only = true;
8974 /* FALLTHRU */
1304953e 8975 case BUILT_IN_ADD_OVERFLOW:
44a845ca
MS
8976 opcode = PLUS_EXPR;
8977 /* FALLTHRU */
1304953e
JJ
8978 case BUILT_IN_SADD_OVERFLOW:
8979 case BUILT_IN_SADDL_OVERFLOW:
8980 case BUILT_IN_SADDLL_OVERFLOW:
8981 case BUILT_IN_UADD_OVERFLOW:
8982 case BUILT_IN_UADDL_OVERFLOW:
8983 case BUILT_IN_UADDLL_OVERFLOW:
8984 ifn = IFN_ADD_OVERFLOW;
8985 break;
44a845ca
MS
8986 case BUILT_IN_SUB_OVERFLOW_P:
8987 ovf_only = true;
8988 /* FALLTHRU */
1304953e 8989 case BUILT_IN_SUB_OVERFLOW:
44a845ca
MS
8990 opcode = MINUS_EXPR;
8991 /* FALLTHRU */
1304953e
JJ
8992 case BUILT_IN_SSUB_OVERFLOW:
8993 case BUILT_IN_SSUBL_OVERFLOW:
8994 case BUILT_IN_SSUBLL_OVERFLOW:
8995 case BUILT_IN_USUB_OVERFLOW:
8996 case BUILT_IN_USUBL_OVERFLOW:
8997 case BUILT_IN_USUBLL_OVERFLOW:
8998 ifn = IFN_SUB_OVERFLOW;
8999 break;
44a845ca
MS
9000 case BUILT_IN_MUL_OVERFLOW_P:
9001 ovf_only = true;
9002 /* FALLTHRU */
1304953e 9003 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
9004 opcode = MULT_EXPR;
9005 /* FALLTHRU */
1304953e
JJ
9006 case BUILT_IN_SMUL_OVERFLOW:
9007 case BUILT_IN_SMULL_OVERFLOW:
9008 case BUILT_IN_SMULLL_OVERFLOW:
9009 case BUILT_IN_UMUL_OVERFLOW:
9010 case BUILT_IN_UMULL_OVERFLOW:
9011 case BUILT_IN_UMULLL_OVERFLOW:
9012 ifn = IFN_MUL_OVERFLOW;
9013 break;
9014 default:
9015 gcc_unreachable ();
9016 }
44a845ca
MS
9017
9018 /* For the "generic" overloads, the first two arguments can have different
9019 types and the last argument determines the target type to use to check
9020 for overflow. The arguments of the other overloads all have the same
9021 type. */
9022 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9023
9024 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9025 arguments are constant, attempt to fold the built-in call into a constant
9026 expression indicating whether or not it detected an overflow. */
9027 if (ovf_only
9028 && TREE_CODE (arg0) == INTEGER_CST
9029 && TREE_CODE (arg1) == INTEGER_CST)
9030 /* Perform the computation in the target type and check for overflow. */
9031 return omit_one_operand_loc (loc, boolean_type_node,
9032 arith_overflowed_p (opcode, type, arg0, arg1)
9033 ? boolean_true_node : boolean_false_node,
9034 arg2);
9035
1304953e
JJ
9036 tree ctype = build_complex_type (type);
9037 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9038 2, arg0, arg1);
9039 tree tgt = save_expr (call);
9040 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9041 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9042 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
44a845ca
MS
9043
9044 if (ovf_only)
9045 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9046
9047 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
1304953e
JJ
9048 tree store
9049 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9050 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9051}
9052
b25aad5f
MS
9053/* Fold a call to __builtin_FILE to a constant string. */
9054
9055static inline tree
9056fold_builtin_FILE (location_t loc)
9057{
9058 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
9059 {
9060 /* The documentation says this builtin is equivalent to the preprocessor
9061 __FILE__ macro so it appears appropriate to use the same file prefix
9062 mappings. */
9063 fname = remap_macro_filename (fname);
b25aad5f 9064 return build_string_literal (strlen (fname) + 1, fname);
7365279f 9065 }
b25aad5f
MS
9066
9067 return build_string_literal (1, "");
9068}
9069
9070/* Fold a call to __builtin_FUNCTION to a constant string. */
9071
9072static inline tree
9073fold_builtin_FUNCTION ()
9074{
f76b4224
NS
9075 const char *name = "";
9076
b25aad5f 9077 if (current_function_decl)
f76b4224 9078 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 9079
f76b4224 9080 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
9081}
9082
9083/* Fold a call to __builtin_LINE to an integer constant. */
9084
9085static inline tree
9086fold_builtin_LINE (location_t loc, tree type)
9087{
9088 return build_int_cst (type, LOCATION_LINE (loc));
9089}
9090
5039610b 9091/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 9092 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 9093
6de9cd9a 9094static tree
2625bb5d 9095fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 9096{
c0a47a61 9097 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 9098 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 9099 switch (fcode)
b0b3afb2 9100 {
b25aad5f
MS
9101 case BUILT_IN_FILE:
9102 return fold_builtin_FILE (loc);
9103
9104 case BUILT_IN_FUNCTION:
9105 return fold_builtin_FUNCTION ();
9106
9107 case BUILT_IN_LINE:
9108 return fold_builtin_LINE (loc, type);
9109
5039610b 9110 CASE_FLT_FN (BUILT_IN_INF):
6dc198e3 9111 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
5039610b
SL
9112 case BUILT_IN_INFD32:
9113 case BUILT_IN_INFD64:
9114 case BUILT_IN_INFD128:
db3927fb 9115 return fold_builtin_inf (loc, type, true);
d3147f64 9116
5039610b 9117 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
6dc198e3 9118 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
db3927fb 9119 return fold_builtin_inf (loc, type, false);
d3147f64 9120
5039610b
SL
9121 case BUILT_IN_CLASSIFY_TYPE:
9122 return fold_builtin_classify_type (NULL_TREE);
d3147f64 9123
5039610b
SL
9124 default:
9125 break;
9126 }
9127 return NULL_TREE;
9128}
d3147f64 9129
5039610b 9130/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
2625bb5d 9131 This function returns NULL_TREE if no simplification was possible. */
d3147f64 9132
5039610b 9133static tree
2625bb5d 9134fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
5039610b
SL
9135{
9136 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9137 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9138
9139 if (TREE_CODE (arg0) == ERROR_MARK)
9140 return NULL_TREE;
9141
d7ebef06 9142 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
5c1a2e63
RS
9143 return ret;
9144
5039610b
SL
9145 switch (fcode)
9146 {
b0b3afb2 9147 case BUILT_IN_CONSTANT_P:
d3147f64 9148 {
5039610b 9149 tree val = fold_builtin_constant_p (arg0);
d3147f64 9150
d3147f64
EC
9151 /* Gimplification will pull the CALL_EXPR for the builtin out of
9152 an if condition. When not optimizing, we'll not CSE it back.
9153 To avoid link error types of regressions, return false now. */
9154 if (!val && !optimize)
9155 val = integer_zero_node;
9156
9157 return val;
9158 }
b0b3afb2 9159
ad82abb8 9160 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9161 return fold_builtin_classify_type (arg0);
ad82abb8 9162
b0b3afb2 9163 case BUILT_IN_STRLEN:
ab996409 9164 return fold_builtin_strlen (loc, type, arg0);
b0b3afb2 9165
ea6a6627 9166 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 9167 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
9168 case BUILT_IN_FABSD32:
9169 case BUILT_IN_FABSD64:
9170 case BUILT_IN_FABSD128:
db3927fb 9171 return fold_builtin_fabs (loc, arg0, type);
9655d83b
RS
9172
9173 case BUILT_IN_ABS:
9174 case BUILT_IN_LABS:
9175 case BUILT_IN_LLABS:
9176 case BUILT_IN_IMAXABS:
db3927fb 9177 return fold_builtin_abs (loc, arg0, type);
07bae5ad 9178
ea6a6627 9179 CASE_FLT_FN (BUILT_IN_CONJ):
c128599a 9180 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9181 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9182 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
5039610b 9183 break;
aa6c7c3a 9184
ea6a6627 9185 CASE_FLT_FN (BUILT_IN_CREAL):
c128599a 9186 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9187 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
6f3d1a5e 9188 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
5039610b 9189 break;
aa6c7c3a 9190
ea6a6627 9191 CASE_FLT_FN (BUILT_IN_CIMAG):
376da68e
KG
9192 if (validate_arg (arg0, COMPLEX_TYPE)
9193 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9194 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
5039610b 9195 break;
aa6c7c3a 9196
5c1a2e63
RS
9197 CASE_FLT_FN (BUILT_IN_CARG):
9198 return fold_builtin_carg (loc, arg0, type);
43272bf5 9199
5c1a2e63
RS
9200 case BUILT_IN_ISASCII:
9201 return fold_builtin_isascii (loc, arg0);
b8698a0f 9202
5c1a2e63
RS
9203 case BUILT_IN_TOASCII:
9204 return fold_builtin_toascii (loc, arg0);
b8698a0f 9205
5c1a2e63
RS
9206 case BUILT_IN_ISDIGIT:
9207 return fold_builtin_isdigit (loc, arg0);
b8698a0f 9208
903c723b
TC
9209 CASE_FLT_FN (BUILT_IN_FINITE):
9210 case BUILT_IN_FINITED32:
9211 case BUILT_IN_FINITED64:
9212 case BUILT_IN_FINITED128:
9213 case BUILT_IN_ISFINITE:
9214 {
9215 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9216 if (ret)
9217 return ret;
9218 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9219 }
9220
9221 CASE_FLT_FN (BUILT_IN_ISINF):
9222 case BUILT_IN_ISINFD32:
9223 case BUILT_IN_ISINFD64:
9224 case BUILT_IN_ISINFD128:
9225 {
9226 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9227 if (ret)
9228 return ret;
9229 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9230 }
9231
9232 case BUILT_IN_ISNORMAL:
9233 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9234
5c1a2e63 9235 case BUILT_IN_ISINF_SIGN:
903c723b
TC
9236 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9237
9238 CASE_FLT_FN (BUILT_IN_ISNAN):
9239 case BUILT_IN_ISNAND32:
9240 case BUILT_IN_ISNAND64:
9241 case BUILT_IN_ISNAND128:
9242 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
b8698a0f 9243
5c1a2e63
RS
9244 case BUILT_IN_FREE:
9245 if (integer_zerop (arg0))
9246 return build_empty_stmt (loc);
abcc43f5 9247 break;
07bae5ad 9248
5c1a2e63 9249 default:
4835c978 9250 break;
5c1a2e63 9251 }
4977bab6 9252
5c1a2e63 9253 return NULL_TREE;
e19f6bde 9254
5c1a2e63 9255}
b53fed56 9256
5c1a2e63
RS
9257/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9258 This function returns NULL_TREE if no simplification was possible. */
5039610b
SL
9259
9260static tree
2625bb5d 9261fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
5039610b
SL
9262{
9263 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9264 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9265
5c1a2e63
RS
9266 if (TREE_CODE (arg0) == ERROR_MARK
9267 || TREE_CODE (arg1) == ERROR_MARK)
9268 return NULL_TREE;
ea91f957 9269
d7ebef06 9270 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
5c1a2e63 9271 return ret;
752b7d38 9272
5c1a2e63
RS
9273 switch (fcode)
9274 {
752b7d38
KG
9275 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9276 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9277 if (validate_arg (arg0, REAL_TYPE)
c3284718 9278 && validate_arg (arg1, POINTER_TYPE))
752b7d38
KG
9279 return do_mpfr_lgamma_r (arg0, arg1, type);
9280 break;
5039610b 9281
7a2a25ab 9282 CASE_FLT_FN (BUILT_IN_FREXP):
db3927fb 9283 return fold_builtin_frexp (loc, arg0, arg1, type);
7a2a25ab 9284
3d577eaf 9285 CASE_FLT_FN (BUILT_IN_MODF):
db3927fb 9286 return fold_builtin_modf (loc, arg0, arg1, type);
3d577eaf 9287
5039610b 9288 case BUILT_IN_STRSPN:
db3927fb 9289 return fold_builtin_strspn (loc, arg0, arg1);
5039610b
SL
9290
9291 case BUILT_IN_STRCSPN:
db3927fb 9292 return fold_builtin_strcspn (loc, arg0, arg1);
5039610b 9293
5039610b 9294 case BUILT_IN_STRPBRK:
db3927fb 9295 return fold_builtin_strpbrk (loc, arg0, arg1, type);
5039610b
SL
9296
9297 case BUILT_IN_EXPECT:
ed9c79e1 9298 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
5039610b 9299
08039bd8 9300 case BUILT_IN_ISGREATER:
db3927fb
AH
9301 return fold_builtin_unordered_cmp (loc, fndecl,
9302 arg0, arg1, UNLE_EXPR, LE_EXPR);
08039bd8 9303 case BUILT_IN_ISGREATEREQUAL:
db3927fb
AH
9304 return fold_builtin_unordered_cmp (loc, fndecl,
9305 arg0, arg1, UNLT_EXPR, LT_EXPR);
08039bd8 9306 case BUILT_IN_ISLESS:
db3927fb
AH
9307 return fold_builtin_unordered_cmp (loc, fndecl,
9308 arg0, arg1, UNGE_EXPR, GE_EXPR);
08039bd8 9309 case BUILT_IN_ISLESSEQUAL:
db3927fb
AH
9310 return fold_builtin_unordered_cmp (loc, fndecl,
9311 arg0, arg1, UNGT_EXPR, GT_EXPR);
08039bd8 9312 case BUILT_IN_ISLESSGREATER:
db3927fb
AH
9313 return fold_builtin_unordered_cmp (loc, fndecl,
9314 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
08039bd8 9315 case BUILT_IN_ISUNORDERED:
db3927fb
AH
9316 return fold_builtin_unordered_cmp (loc, fndecl,
9317 arg0, arg1, UNORDERED_EXPR,
a35da91f 9318 NOP_EXPR);
08039bd8 9319
d3147f64
EC
9320 /* We do the folding for va_start in the expander. */
9321 case BUILT_IN_VA_START:
9322 break;
a32e70c3 9323
10a0d495 9324 case BUILT_IN_OBJECT_SIZE:
5039610b 9325 return fold_builtin_object_size (arg0, arg1);
10a0d495 9326
86951993
AM
9327 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9328 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9329
9330 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9331 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9332
5039610b
SL
9333 default:
9334 break;
9335 }
9336 return NULL_TREE;
9337}
9338
9339/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
2625bb5d 9340 and ARG2.
5039610b
SL
9341 This function returns NULL_TREE if no simplification was possible. */
9342
9343static tree
db3927fb 9344fold_builtin_3 (location_t loc, tree fndecl,
2625bb5d 9345 tree arg0, tree arg1, tree arg2)
5039610b
SL
9346{
9347 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9348 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9349
9350 if (TREE_CODE (arg0) == ERROR_MARK
9351 || TREE_CODE (arg1) == ERROR_MARK
9352 || TREE_CODE (arg2) == ERROR_MARK)
9353 return NULL_TREE;
9354
d7ebef06
RS
9355 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9356 arg0, arg1, arg2))
5c1a2e63
RS
9357 return ret;
9358
5039610b
SL
9359 switch (fcode)
9360 {
9361
9362 CASE_FLT_FN (BUILT_IN_SINCOS):
db3927fb 9363 return fold_builtin_sincos (loc, arg0, arg1, arg2);
5039610b 9364
ea91f957
KG
9365 CASE_FLT_FN (BUILT_IN_REMQUO):
9366 if (validate_arg (arg0, REAL_TYPE)
c3284718
RS
9367 && validate_arg (arg1, REAL_TYPE)
9368 && validate_arg (arg2, POINTER_TYPE))
ea91f957
KG
9369 return do_mpfr_remquo (arg0, arg1, arg2);
9370 break;
ea91f957 9371
5039610b 9372 case BUILT_IN_MEMCMP:
5de73c05 9373 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
5039610b 9374
ed9c79e1
JJ
9375 case BUILT_IN_EXPECT:
9376 return fold_builtin_expect (loc, arg0, arg1, arg2);
9377
1304953e
JJ
9378 case BUILT_IN_ADD_OVERFLOW:
9379 case BUILT_IN_SUB_OVERFLOW:
9380 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
9381 case BUILT_IN_ADD_OVERFLOW_P:
9382 case BUILT_IN_SUB_OVERFLOW_P:
9383 case BUILT_IN_MUL_OVERFLOW_P:
1304953e
JJ
9384 case BUILT_IN_SADD_OVERFLOW:
9385 case BUILT_IN_SADDL_OVERFLOW:
9386 case BUILT_IN_SADDLL_OVERFLOW:
9387 case BUILT_IN_SSUB_OVERFLOW:
9388 case BUILT_IN_SSUBL_OVERFLOW:
9389 case BUILT_IN_SSUBLL_OVERFLOW:
9390 case BUILT_IN_SMUL_OVERFLOW:
9391 case BUILT_IN_SMULL_OVERFLOW:
9392 case BUILT_IN_SMULLL_OVERFLOW:
9393 case BUILT_IN_UADD_OVERFLOW:
9394 case BUILT_IN_UADDL_OVERFLOW:
9395 case BUILT_IN_UADDLL_OVERFLOW:
9396 case BUILT_IN_USUB_OVERFLOW:
9397 case BUILT_IN_USUBL_OVERFLOW:
9398 case BUILT_IN_USUBLL_OVERFLOW:
9399 case BUILT_IN_UMUL_OVERFLOW:
9400 case BUILT_IN_UMULL_OVERFLOW:
9401 case BUILT_IN_UMULLL_OVERFLOW:
9402 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9403
b0b3afb2
BS
9404 default:
9405 break;
9406 }
5039610b
SL
9407 return NULL_TREE;
9408}
b0b3afb2 9409
5039610b 9410/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
a6a0570f
RB
9411 arguments. IGNORE is true if the result of the
9412 function call is ignored. This function returns NULL_TREE if no
9413 simplification was possible. */
b8698a0f 9414
3d2cf79f 9415tree
2625bb5d 9416fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
5039610b
SL
9417{
9418 tree ret = NULL_TREE;
f4577fcd 9419
5039610b
SL
9420 switch (nargs)
9421 {
9422 case 0:
2625bb5d 9423 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
9424 break;
9425 case 1:
2625bb5d 9426 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
9427 break;
9428 case 2:
2625bb5d 9429 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
5039610b
SL
9430 break;
9431 case 3:
2625bb5d 9432 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 9433 break;
5039610b 9434 default:
903c723b 9435 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
9436 break;
9437 }
9438 if (ret)
9439 {
726a989a 9440 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 9441 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
9442 return ret;
9443 }
9444 return NULL_TREE;
9445}
9446
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  The N new arguments come first in the rewritten
   call, followed by ARGS[SKIP..OLDNARGS-1].  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* Allocate a scratch vector on the stack; I continues from the
	 first loop into the second so the old tail is appended right
	 after the new arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* No new arguments: reuse the tail of ARGS directly, no copy.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
5039610b 9474
0889e9bc
JJ
9475/* Return true if FNDECL shouldn't be folded right now.
9476 If a built-in function has an inline attribute always_inline
9477 wrapper, defer folding it after always_inline functions have
9478 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9479 might not be performed. */
9480
e7f9dae0 9481bool
0889e9bc
JJ
9482avoid_folding_inline_builtin (tree fndecl)
9483{
9484 return (DECL_DECLARED_INLINE_P (fndecl)
9485 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9486 && cfun
9487 && !cfun->always_inline_functions_inlined
9488 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9489}
9490
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  EXP is the
   CALL_EXPR to fold; IGNORE is true if the call's value is unused.
   Returns the folded tree or NULL_TREE if no folding was done.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Defer always_inline wrappers (e.g. _FORTIFY_SOURCE) until
	 after inlining so their checks are not bypassed.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook;
	 everything else goes through the generic folder.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
b8698a0f 9539
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.
   The TYPE parameter is currently unused (hence unnamed).  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls (&fndecl) can be folded here.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      /* Defer always_inline wrappers, same as fold_call_expr.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9578
43ea30dc
NF
9579/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9580 along with N new arguments specified as the "..." parameters. SKIP
9581 is the number of arguments in EXP to be omitted. This function is used
9582 to do varargs-to-varargs transformations. */
9583
9584static tree
9585rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9586{
9587 va_list ap;
9588 tree t;
9589
9590 va_start (ap, n);
9591 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9592 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9593 va_end (ap);
5039610b 9594
43ea30dc 9595 return t;
5039610b
SL
9596}
9597
9598/* Validate a single argument ARG against a tree code CODE representing
0dba7960 9599 a type. Return true when argument is valid. */
b8698a0f 9600
5039610b 9601static bool
0dba7960 9602validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
9603{
9604 if (!arg)
9605 return false;
9606 else if (code == POINTER_TYPE)
0dba7960 9607 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
9608 else if (code == INTEGER_TYPE)
9609 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 9610 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 9611}
019fa094 9612
726a989a
RB
9613/* This function validates the types of a function call argument list
9614 against a specified list of tree_codes. If the last specifier is a 0,
9615 that represents an ellipses, otherwise the last specifier must be a
9616 VOID_TYPE.
9617
9618 This is the GIMPLE version of validate_arglist. Eventually we want to
9619 completely convert builtins.c to work from GIMPLEs and the tree based
9620 validate_arglist will then be removed. */
9621
9622bool
538dd0b7 9623validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
9624{
9625 enum tree_code code;
9626 bool res = 0;
9627 va_list ap;
9628 const_tree arg;
9629 size_t i;
9630
9631 va_start (ap, call);
9632 i = 0;
9633
9634 do
9635 {
72b5577d 9636 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
9637 switch (code)
9638 {
9639 case 0:
9640 /* This signifies an ellipses, any further arguments are all ok. */
9641 res = true;
9642 goto end;
9643 case VOID_TYPE:
9644 /* This signifies an endlink, if no arguments remain, return
9645 true, otherwise return false. */
9646 res = (i == gimple_call_num_args (call));
9647 goto end;
9648 default:
9649 /* If no parameters remain or the parameter's code does not
9650 match the specified code, return false. Otherwise continue
9651 checking any remaining arguments. */
9652 arg = gimple_call_arg (call, i++);
9653 if (!validate_arg (arg, code))
9654 goto end;
9655 break;
9656 }
9657 }
9658 while (1);
9659
9660 /* We need gotos here since we can only have one VA_CLOSE in a
9661 function. */
9662 end: ;
9663 va_end (ap);
9664
9665 return res;
9666}
9667
/* Default target-specific builtin expander that does nothing.
   Targets that do not override the expand_builtin hook get this stub;
   returning NULL_RTX tells the caller to emit a normal library call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
34ee7f82 9679
7dc61d6c
KG
9680/* Returns true is EXP represents data that would potentially reside
9681 in a readonly section. */
9682
fef5a0d9 9683bool
7dc61d6c
KG
9684readonly_data_expr (tree exp)
9685{
9686 STRIP_NOPS (exp);
9687
aef0afc4
UW
9688 if (TREE_CODE (exp) != ADDR_EXPR)
9689 return false;
9690
9691 exp = get_base_address (TREE_OPERAND (exp, 0));
9692 if (!exp)
9693 return false;
9694
9695 /* Make sure we call decl_readonly_section only for trees it
9696 can handle (since it returns true for everything it doesn't
9697 understand). */
caf93cb0 9698 if (TREE_CODE (exp) == STRING_CST
aef0afc4 9699 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 9700 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 9701 return decl_readonly_section (exp, 0);
7dc61d6c
KG
9702 else
9703 return false;
9704}
6de9cd9a 9705
5039610b
SL
9706/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9707 to the call, and TYPE is its return type.
6de9cd9a 9708
5039610b 9709 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9710 simplified form of the call as a tree.
9711
9712 The simplified form may be a constant or other expression which
9713 computes the same value, but in a more efficient manner (including
9714 calls to other builtin functions).
9715
9716 The call may contain arguments which need to be evaluated, but
9717 which are not useful to determine the result of the call. In
9718 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9719 COMPOUND_EXPR will be an argument which must be evaluated.
9720 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9721 COMPOUND_EXPR in the chain will contain the tree for the simplified
9722 form of the builtin function call. */
9723
9724static tree
db3927fb 9725fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
6de9cd9a 9726{
5039610b
SL
9727 if (!validate_arg (s1, POINTER_TYPE)
9728 || !validate_arg (s2, POINTER_TYPE))
9729 return NULL_TREE;
6de9cd9a
DN
9730 else
9731 {
6de9cd9a
DN
9732 tree fn;
9733 const char *p1, *p2;
9734
9735 p2 = c_getstr (s2);
9736 if (p2 == NULL)
5039610b 9737 return NULL_TREE;
6de9cd9a
DN
9738
9739 p1 = c_getstr (s1);
9740 if (p1 != NULL)
9741 {
9742 const char *r = strpbrk (p1, p2);
5fcfe0b2 9743 tree tem;
6de9cd9a
DN
9744
9745 if (r == NULL)
5212068f 9746 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a
DN
9747
9748 /* Return an offset into the constant string argument. */
5d49b6a7 9749 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
db3927fb 9750 return fold_convert_loc (loc, type, tem);
6de9cd9a
DN
9751 }
9752
9753 if (p2[0] == '\0')
d6dc556b
RS
9754 /* strpbrk(x, "") == NULL.
9755 Evaluate and ignore s1 in case it had side-effects. */
a8ed1cbd 9756 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
6de9cd9a
DN
9757
9758 if (p2[1] != '\0')
5039610b 9759 return NULL_TREE; /* Really call strpbrk. */
6de9cd9a 9760
e79983f4 9761 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
6de9cd9a 9762 if (!fn)
5039610b 9763 return NULL_TREE;
6de9cd9a
DN
9764
9765 /* New argument list transforming strpbrk(s1, s2) to
9766 strchr(s1, s2[0]). */
45a2c477
RG
9767 return build_call_expr_loc (loc, fn, 2, s1,
9768 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
9769 }
9770}
9771
5039610b
SL
9772/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9773 to the call.
6de9cd9a 9774
5039610b 9775 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9776 simplified form of the call as a tree.
9777
9778 The simplified form may be a constant or other expression which
9779 computes the same value, but in a more efficient manner (including
9780 calls to other builtin functions).
9781
9782 The call may contain arguments which need to be evaluated, but
9783 which are not useful to determine the result of the call. In
9784 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9785 COMPOUND_EXPR will be an argument which must be evaluated.
9786 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9787 COMPOUND_EXPR in the chain will contain the tree for the simplified
9788 form of the builtin function call. */
9789
9790static tree
db3927fb 9791fold_builtin_strspn (location_t loc, tree s1, tree s2)
6de9cd9a 9792{
5039610b
SL
9793 if (!validate_arg (s1, POINTER_TYPE)
9794 || !validate_arg (s2, POINTER_TYPE))
9795 return NULL_TREE;
6de9cd9a
DN
9796 else
9797 {
6de9cd9a
DN
9798 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9799
5039610b 9800 /* If either argument is "", return NULL_TREE. */
6de9cd9a 9801 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
08039bd8
RS
9802 /* Evaluate and ignore both arguments in case either one has
9803 side-effects. */
db3927fb 9804 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 9805 s1, s2);
5039610b 9806 return NULL_TREE;
6de9cd9a
DN
9807 }
9808}
9809
5039610b
SL
9810/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9811 to the call.
6de9cd9a 9812
5039610b 9813 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9814 simplified form of the call as a tree.
9815
9816 The simplified form may be a constant or other expression which
9817 computes the same value, but in a more efficient manner (including
9818 calls to other builtin functions).
9819
9820 The call may contain arguments which need to be evaluated, but
9821 which are not useful to determine the result of the call. In
9822 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9823 COMPOUND_EXPR will be an argument which must be evaluated.
9824 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9825 COMPOUND_EXPR in the chain will contain the tree for the simplified
9826 form of the builtin function call. */
9827
9828static tree
db3927fb 9829fold_builtin_strcspn (location_t loc, tree s1, tree s2)
6de9cd9a 9830{
5039610b
SL
9831 if (!validate_arg (s1, POINTER_TYPE)
9832 || !validate_arg (s2, POINTER_TYPE))
9833 return NULL_TREE;
6de9cd9a
DN
9834 else
9835 {
5039610b 9836 /* If the first argument is "", return NULL_TREE. */
df838ef0 9837 const char *p1 = c_getstr (s1);
6de9cd9a
DN
9838 if (p1 && *p1 == '\0')
9839 {
9840 /* Evaluate and ignore argument s2 in case it has
9841 side-effects. */
db3927fb 9842 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 9843 size_zero_node, s2);
6de9cd9a
DN
9844 }
9845
9846 /* If the second argument is "", return __builtin_strlen(s1). */
df838ef0 9847 const char *p2 = c_getstr (s2);
6de9cd9a
DN
9848 if (p2 && *p2 == '\0')
9849 {
e79983f4 9850 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a
DN
9851
9852 /* If the replacement _DECL isn't initialized, don't do the
9853 transformation. */
9854 if (!fn)
5039610b 9855 return NULL_TREE;
6de9cd9a 9856
db3927fb 9857 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 9858 }
5039610b 9859 return NULL_TREE;
6de9cd9a
DN
9860 }
9861}
9862
5039610b 9863/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
9864 produced. False otherwise. This is done so that we don't output the error
9865 or warning twice or three times. */
726a989a 9866
2efcfa4e 9867bool
5039610b 9868fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
9869{
9870 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
9871 int nargs = call_expr_nargs (exp);
9872 tree arg;
34c88790
DS
9873 /* There is good chance the current input_location points inside the
9874 definition of the va_start macro (perhaps on the token for
9875 builtin) in a system header, so warnings will not be emitted.
9876 Use the location in real source code. */
9877 source_location current_location =
9878 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9879 NULL);
6de9cd9a 9880
f38958e8 9881 if (!stdarg_p (fntype))
2efcfa4e
AP
9882 {
9883 error ("%<va_start%> used in function with fixed args");
9884 return true;
9885 }
5039610b
SL
9886
9887 if (va_start_p)
8870e212 9888 {
5039610b
SL
9889 if (va_start_p && (nargs != 2))
9890 {
9891 error ("wrong number of arguments to function %<va_start%>");
9892 return true;
9893 }
9894 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
9895 }
9896 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9897 when we checked the arguments and if needed issued a warning. */
5039610b 9898 else
6de9cd9a 9899 {
5039610b
SL
9900 if (nargs == 0)
9901 {
9902 /* Evidently an out of date version of <stdarg.h>; can't validate
9903 va_start's second argument, but can still work as intended. */
34c88790 9904 warning_at (current_location,
b9c8da34
DS
9905 OPT_Wvarargs,
9906 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
9907 return true;
9908 }
9909 else if (nargs > 1)
c22cacf3 9910 {
5039610b 9911 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
9912 return true;
9913 }
5039610b
SL
9914 arg = CALL_EXPR_ARG (exp, 0);
9915 }
9916
4e3825db
MM
9917 if (TREE_CODE (arg) == SSA_NAME)
9918 arg = SSA_NAME_VAR (arg);
9919
5039610b 9920 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 9921 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
9922 the arguments and if needed issuing a warning. */
9923 if (!integer_zerop (arg))
9924 {
9925 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 9926
6de9cd9a
DN
9927 /* Strip off all nops for the sake of the comparison. This
9928 is not quite the same as STRIP_NOPS. It does more.
9929 We must also strip off INDIRECT_EXPR for C++ reference
9930 parameters. */
1043771b 9931 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
9932 || TREE_CODE (arg) == INDIRECT_REF)
9933 arg = TREE_OPERAND (arg, 0);
9934 if (arg != last_parm)
c22cacf3 9935 {
118f3b19
KH
9936 /* FIXME: Sometimes with the tree optimizers we can get the
9937 not the last argument even though the user used the last
9938 argument. We just warn and set the arg to be the last
9939 argument so that we will get wrong-code because of
9940 it. */
34c88790 9941 warning_at (current_location,
b9c8da34 9942 OPT_Wvarargs,
34c88790 9943 "second parameter of %<va_start%> not last named argument");
2efcfa4e 9944 }
2985f531
MLI
9945
9946 /* Undefined by C99 7.15.1.4p4 (va_start):
9947 "If the parameter parmN is declared with the register storage
9948 class, with a function or array type, or with a type that is
9949 not compatible with the type that results after application of
9950 the default argument promotions, the behavior is undefined."
9951 */
9952 else if (DECL_REGISTER (arg))
34c88790
DS
9953 {
9954 warning_at (current_location,
b9c8da34 9955 OPT_Wvarargs,
9c582551 9956 "undefined behavior when second parameter of "
34c88790
DS
9957 "%<va_start%> is declared with %<register%> storage");
9958 }
2985f531 9959
8870e212 9960 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
9961 optimizers are run and then avoid keeping it in the tree,
9962 as otherwise we could warn even for correct code like:
9963 void foo (int i, ...)
9964 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
9965 if (va_start_p)
9966 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9967 else
9968 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
9969 }
9970 return false;
6de9cd9a
DN
9971}
9972
9973
/* Expand a call EXP to __builtin_object_size.  Emits a trap and returns
   const0_rtx on malformed calls; otherwise returns the maximally
   conservative answer ((size_t)-1 for types 0/1, 0 for types 2/3),
   since by expansion time no better estimate was computed.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The type argument must be a literal 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
10008
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  /* For __memset_chk the second argument is the fill value (integer),
     for the others it is the source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses; also records whether the sizes
     validated so we can decide below whether dropping the check is
     safe.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Replace the checking call with the unchecked variant,
	 preserving the tail-call flag.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10125
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call to one of the __*_chk builtins; FCODE selects which,
   and determines where each operand sits in the argument list.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  /* Pick the operands out of the call according to each builtin's
     argument layout.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
10198
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  Only formats whose output
   length is statically known (no % directives, or a single %s with a
   literal string argument) are checked.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* An unknown or "infinite" (all-ones) object size cannot be checked.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
10256
f9555f40
JJ
10257/* Emit warning if a free is called with address of a variable. */
10258
10259static void
10260maybe_emit_free_warning (tree exp)
10261{
10262 tree arg = CALL_EXPR_ARG (exp, 0);
10263
10264 STRIP_NOPS (arg);
10265 if (TREE_CODE (arg) != ADDR_EXPR)
10266 return;
10267
10268 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 10269 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
10270 return;
10271
10272 if (SSA_VAR_P (arg))
a3a704a4
MH
10273 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10274 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 10275 else
a3a704a4
MH
10276 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10277 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
10278}
10279
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST must be a literal 0..3 selecting the object size
   type; returns the computed size as a size_type_node constant, or
   NULL_TREE when folding must be deferred or is not possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      /* Only fold when the result fits the return type.  */
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
10326
903c723b
TC
10327/* Builtins with folding operations that operate on "..." arguments
10328 need special handling; we need to store the arguments in a convenient
10329 data structure before attempting any folding. Fortunately there are
10330 only a few builtins that fall into this category. FNDECL is the
10331 function, EXP is the CALL_EXPR for the call. */
10332
10333static tree
10334fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10335{
10336 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10337 tree ret = NULL_TREE;
10338
10339 switch (fcode)
10340 {
10341 case BUILT_IN_FPCLASSIFY:
10342 ret = fold_builtin_fpclassify (loc, args, nargs);
10343 break;
10344
10345 default:
10346 break;
10347 }
10348 if (ret)
10349 {
10350 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10351 SET_EXPR_LOCATION (ret, loc);
10352 TREE_NO_WARNING (ret) = 1;
10353 return ret;
10354 }
10355 return NULL_TREE;
10356}
10357
000ba23d
KG
10358/* Initialize format string characters in the target charset. */
10359
fef5a0d9 10360bool
000ba23d
KG
10361init_target_chars (void)
10362{
10363 static bool init;
10364 if (!init)
10365 {
10366 target_newline = lang_hooks.to_target_charset ('\n');
10367 target_percent = lang_hooks.to_target_charset ('%');
10368 target_c = lang_hooks.to_target_charset ('c');
10369 target_s = lang_hooks.to_target_charset ('s');
10370 if (target_newline == 0 || target_percent == 0 || target_c == 0
10371 || target_s == 0)
10372 return false;
10373
10374 target_percent_c[0] = target_percent;
10375 target_percent_c[1] = target_c;
10376 target_percent_c[2] = '\0';
10377
10378 target_percent_s[0] = target_percent;
10379 target_percent_s[1] = target_s;
10380 target_percent_s[2] = '\0';
10381
10382 target_percent_s_newline[0] = target_percent;
10383 target_percent_s_newline[1] = target_s;
10384 target_percent_s_newline[2] = target_newline;
10385 target_percent_s_newline[3] = '\0';
c22cacf3 10386
000ba23d
KG
10387 init = true;
10388 }
10389 return true;
10390}
1f3f1f68 10391
4413d881
KG
10392/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10393 and no overflow/underflow occurred. INEXACT is true if M was not
2f8e468b 10394 exactly calculated. TYPE is the tree type for the result. This
4413d881
KG
10395 function assumes that you cleared the MPFR flags and then
10396 calculated M to see if anything subsequently set a flag prior to
10397 entering this function. Return NULL_TREE if any checks fail. */
10398
10399static tree
62e5bf5d 10400do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
4413d881
KG
10401{
10402 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10403 overflow/underflow occurred. If -frounding-math, proceed iff the
10404 result of calling FUNC was exact. */
62e5bf5d 10405 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
4413d881
KG
10406 && (!flag_rounding_math || !inexact))
10407 {
10408 REAL_VALUE_TYPE rr;
10409
205a4d09 10410 real_from_mpfr (&rr, m, type, GMP_RNDN);
4413d881
KG
10411 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10412 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10413 but the mpft_t is not, then we underflowed in the
10414 conversion. */
4c8c70e0 10415 if (real_isfinite (&rr)
4413d881
KG
10416 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10417 {
10418 REAL_VALUE_TYPE rmode;
10419
10420 real_convert (&rmode, TYPE_MODE (type), &rr);
10421 /* Proceed iff the specified mode can hold the value. */
10422 if (real_identical (&rmode, &rr))
10423 return build_real (type, rmode);
10424 }
10425 }
10426 return NULL_TREE;
10427}
10428
c128599a
KG
10429/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10430 number and no overflow/underflow occurred. INEXACT is true if M
10431 was not exactly calculated. TYPE is the tree type for the result.
10432 This function assumes that you cleared the MPFR flags and then
10433 calculated M to see if anything subsequently set a flag prior to
ca75b926
KG
10434 entering this function. Return NULL_TREE if any checks fail, if
10435 FORCE_CONVERT is true, then bypass the checks. */
c128599a
KG
10436
10437static tree
ca75b926 10438do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
c128599a
KG
10439{
10440 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10441 overflow/underflow occurred. If -frounding-math, proceed iff the
10442 result of calling FUNC was exact. */
ca75b926
KG
10443 if (force_convert
10444 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10445 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10446 && (!flag_rounding_math || !inexact)))
c128599a
KG
10447 {
10448 REAL_VALUE_TYPE re, im;
10449
14aa6352
DE
10450 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10451 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
c128599a
KG
10452 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10453 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10454 but the mpft_t is not, then we underflowed in the
10455 conversion. */
ca75b926
KG
10456 if (force_convert
10457 || (real_isfinite (&re) && real_isfinite (&im)
10458 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10459 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
c128599a
KG
10460 {
10461 REAL_VALUE_TYPE re_mode, im_mode;
10462
10463 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10464 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10465 /* Proceed iff the specified mode can hold the value. */
ca75b926
KG
10466 if (force_convert
10467 || (real_identical (&re_mode, &re)
10468 && real_identical (&im_mode, &im)))
c128599a
KG
10469 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10470 build_real (TREE_TYPE (type), im_mode));
10471 }
10472 }
10473 return NULL_TREE;
10474}
c128599a 10475
ea91f957
KG
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when the fold cannot
   be done (non-constant or non-finite arguments, unsupported format,
   or inexact MPFR conversion).  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear the flags first: do_mpfr_ckconv() inspects them to
	     detect overflow/underflow raised by the call below.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value, i.e. build "*arg_quo = quo".  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem: the whole
		     call folds to "(*arg_quo = quo, rem)".  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
752b7d38
KG
10549
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE if any precondition fails.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear the flags first: do_mpfr_ckconv() inspects them to
	     detect overflow/underflow raised by the call below.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result:
		 the whole call folds to "(*arg_sg = sg, lgamma)".  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
        }
    }

  return result;
}
726a989a 10614
a41d064d
KG
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Returns NULL_TREE when folding
   is not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Both
     arguments must be complex constants with REAL_TYPE parts.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear the flags first: do_mpc_ckconv() inspects them to
	     detect overflow/underflow raised by FUNC.  */
	  mpfr_clear_flags ();
	  /* FUNC computes in place: m0 <- FUNC (m0, m1).  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 10673
726a989a
RB
10674/* A wrapper function for builtin folding that prevents warnings for
10675 "statement without effect" and the like, caused by removing the
10676 call node earlier than the warning is generated. */
10677
10678tree
538dd0b7 10679fold_call_stmt (gcall *stmt, bool ignore)
726a989a
RB
10680{
10681 tree ret = NULL_TREE;
10682 tree fndecl = gimple_call_fndecl (stmt);
db3927fb 10683 location_t loc = gimple_location (stmt);
726a989a
RB
10684 if (fndecl
10685 && TREE_CODE (fndecl) == FUNCTION_DECL
10686 && DECL_BUILT_IN (fndecl)
10687 && !gimple_call_va_arg_pack_p (stmt))
10688 {
10689 int nargs = gimple_call_num_args (stmt);
8897c9ce
NF
10690 tree *args = (nargs > 0
10691 ? gimple_call_arg_ptr (stmt, 0)
10692 : &error_mark_node);
726a989a 10693
0889e9bc
JJ
10694 if (avoid_folding_inline_builtin (fndecl))
10695 return NULL_TREE;
726a989a
RB
10696 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10697 {
8897c9ce 10698 return targetm.fold_builtin (fndecl, nargs, args, ignore);
726a989a
RB
10699 }
10700 else
10701 {
a6a0570f 10702 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
726a989a
RB
10703 if (ret)
10704 {
10705 /* Propagate location information from original call to
10706 expansion of builtin. Otherwise things like
10707 maybe_emit_chk_warning, that operate on the expansion
10708 of a builtin, will use the wrong location information. */
10709 if (gimple_has_location (stmt))
10710 {
10711 tree realret = ret;
10712 if (TREE_CODE (ret) == NOP_EXPR)
10713 realret = TREE_OPERAND (ret, 0);
10714 if (CAN_HAVE_LOCATION_P (realret)
10715 && !EXPR_HAS_LOCATION (realret))
db3927fb 10716 SET_EXPR_LOCATION (realret, loc);
726a989a
RB
10717 return realret;
10718 }
10719 return ret;
10720 }
10721 }
10722 }
10723 return NULL_TREE;
10724}
d7f09764 10725
e79983f4 10726/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
10727 and set ASMSPEC as its user assembler name. DECL must be a
10728 function decl that declares a builtin. */
10729
10730void
10731set_builtin_user_assembler_name (tree decl, const char *asmspec)
10732{
d7f09764
DN
10733 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10734 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10735 && asmspec != 0);
10736
ee516de9 10737 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 10738 set_user_assembler_name (builtin, asmspec);
ee516de9
EB
10739
10740 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10741 && INT_TYPE_SIZE < BITS_PER_WORD)
d7f09764 10742 {
fffbab82 10743 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
ee516de9 10744 set_user_assembler_libfunc ("ffs", asmspec);
fffbab82 10745 set_optab_libfunc (ffs_optab, mode, "ffs");
d7f09764
DN
10746 }
10747}
bec922f0
SL
10748
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  Only BUILT_IN_NORMAL builtins qualify; anything not in
   the list below (including NULL DECL) yields false.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
10785
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin: any machine-dependent builtin is
   assumed inexpensive, and anything not listed falls back to
   is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Cheap arithmetic, bit twiddling, classification and varargs
	   builtins that expand to a handful of instructions.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
488c6247
ML
10870
10871/* Return true if T is a constant and the value cast to a target char
10872 can be represented by a host char.
10873 Store the casted char constant in *P if so. */
10874
10875bool
10876target_char_cst_p (tree t, char *p)
10877{
10878 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10879 return false;
10880
10881 *p = (char)tree_to_uhwi (t);
10882 return true;
10883}
cc8bea0a
MS
10884
10885/* Return the maximum object size. */
10886
10887tree
10888max_object_size (void)
10889{
10890 /* To do: Make this a configurable parameter. */
10891 return TYPE_MAX_VALUE (ptrdiff_type_node);
10892}