]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
Come up with fndecl_built_in_p.
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
28f4ec01 1/* Expand builtin functions.
85ec4feb 2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5 33#include "predict.h"
b2272b13 34#include "params.h"
957060b5
AM
35#include "tm_p.h"
36#include "stringpool.h"
f90aa46c 37#include "tree-vrp.h"
957060b5
AM
38#include "tree-ssanames.h"
39#include "expmed.h"
40#include "optabs.h"
957060b5
AM
41#include "emit-rtl.h"
42#include "recog.h"
957060b5 43#include "diagnostic-core.h"
40e23961 44#include "alias.h"
40e23961 45#include "fold-const.h"
5c1a2e63 46#include "fold-const-call.h"
cc8bea0a 47#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
48#include "stor-layout.h"
49#include "calls.h"
50#include "varasm.h"
51#include "tree-object-size.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
44a845ca 70#include "gimple-fold.h"
ee92e7ba 71#include "intl.h"
7365279f 72#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
73#include "gomp-constants.h"
74#include "omp-general.h"
81f5094d 75
fa19795e
RS
76struct target_builtins default_target_builtins;
77#if SWITCHABLE_TARGET
78struct target_builtins *this_target_builtins = &default_target_builtins;
79#endif
80
9df2c88c 81/* Define the names of the builtin function types and codes. */
5e351e96 82const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
83 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
84
c6a912da 85#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 86const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
87{
88#include "builtins.def"
89};
9df2c88c 90
cbf5d0e7 91/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 92 initialized to NULL_TREE. */
cbf5d0e7 93builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 94
4e7d7b3d
JJ
95/* Non-zero if __builtin_constant_p should be folded right away. */
96bool force_folding_builtin_constant_p;
97
095a2d76 98static rtx c_readstr (const char *, scalar_int_mode);
4682ae04 99static int target_char_cast (tree, char *);
435bb2a1 100static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
101static int apply_args_size (void);
102static int apply_result_size (void);
4682ae04 103static rtx result_vector (int, rtx);
4682ae04
AJ
104static void expand_builtin_prefetch (tree);
105static rtx expand_builtin_apply_args (void);
106static rtx expand_builtin_apply_args_1 (void);
107static rtx expand_builtin_apply (rtx, rtx, rtx);
108static void expand_builtin_return (rtx);
109static enum type_class type_to_class (tree);
110static rtx expand_builtin_classify_type (tree);
6c7cf1f0 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 114static rtx expand_builtin_sincos (tree);
4359dc2a 115static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 118static rtx expand_builtin_next_arg (void);
4682ae04
AJ
119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
523a59ff 122static rtx inline_expand_builtin_string_cmp (tree, rtx);
44e10129 123static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 126static rtx expand_builtin_memchr (tree, rtx);
44e10129 127static rtx expand_builtin_memcpy (tree, rtx);
671a00ee
ML
128static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp, int endp);
e50d56a5 130static rtx expand_builtin_memmove (tree, rtx);
671a00ee 131static rtx expand_builtin_mempcpy (tree, rtx);
671a00ee 132static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
ee92e7ba 133static rtx expand_builtin_strcat (tree, rtx);
44e10129
MM
134static rtx expand_builtin_strcpy (tree, rtx);
135static rtx expand_builtin_strcpy_args (tree, tree, rtx);
ef4bddc2 136static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 137static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 138static rtx expand_builtin_strncat (tree, rtx);
44e10129 139static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 140static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
141static rtx expand_builtin_memset (tree, rtx, machine_mode);
142static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 143static rtx expand_builtin_bzero (tree);
ef4bddc2 144static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 145static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 146static rtx expand_builtin_alloca (tree);
ef4bddc2 147static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 148static rtx expand_builtin_frame_address (tree, tree);
db3927fb 149static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 150static rtx expand_builtin_expect (tree, rtx);
1e9168b2 151static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
152static tree fold_builtin_constant_p (tree);
153static tree fold_builtin_classify_type (tree);
ab996409 154static tree fold_builtin_strlen (location_t, tree, tree);
db3927fb 155static tree fold_builtin_inf (location_t, tree, int);
db3927fb 156static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 157static bool validate_arg (const_tree, enum tree_code code);
4682ae04 158static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 159static rtx expand_builtin_signbit (tree, rtx);
db3927fb 160static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
161static tree fold_builtin_isascii (location_t, tree);
162static tree fold_builtin_toascii (location_t, tree);
163static tree fold_builtin_isdigit (location_t, tree);
164static tree fold_builtin_fabs (location_t, tree, tree);
165static tree fold_builtin_abs (location_t, tree, tree);
166static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 167 enum tree_code);
2625bb5d
RB
168static tree fold_builtin_0 (location_t, tree);
169static tree fold_builtin_1 (location_t, tree, tree);
170static tree fold_builtin_2 (location_t, tree, tree, tree);
171static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
903c723b 172static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb
AH
173
174static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
db3927fb
AH
175static tree fold_builtin_strspn (location_t, tree, tree);
176static tree fold_builtin_strcspn (location_t, tree, tree);
6de9cd9a 177
10a0d495 178static rtx expand_builtin_object_size (tree);
ef4bddc2 179static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
180 enum built_in_function);
181static void maybe_emit_chk_warning (tree, enum built_in_function);
182static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
f9555f40 183static void maybe_emit_free_warning (tree);
5039610b 184static tree fold_builtin_object_size (tree, tree);
000ba23d 185
ad03a744 186unsigned HOST_WIDE_INT target_newline;
fef5a0d9 187unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
188static unsigned HOST_WIDE_INT target_c;
189static unsigned HOST_WIDE_INT target_s;
edd7ae68 190char target_percent_c[3];
fef5a0d9 191char target_percent_s[3];
ad03a744 192char target_percent_s_newline[4];
ea91f957 193static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 194static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 195static void expand_builtin_sync_synchronize (void);
10a0d495 196
/* Return true if NAME starts with one of the internal builtin prefixes
   "__builtin_", "__sync_" or "__atomic_".  (The previous comment omitted
   "__atomic_" even though the code has matched it since the atomic
   builtins were added.)  */

static bool
is_builtin_name (const char *name)
{
  /* Deriving each length from the literal itself keeps the count from
     drifting out of sync with the prefix string, unlike the previous
     hard-coded 10/7/9 constants.  */
  static const char *const prefixes[] = {
    "__builtin_", "__sync_", "__atomic_"
  };
  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;
  return false;
}
6de9cd9a 210
bbf7ce11
RAE
211/* Return true if NODE should be considered for inline expansion regardless
212 of the optimization level. This means whenever a function is invoked with
213 its "internal" name, which normally contains the prefix "__builtin". */
214
4cfe7a6c 215bool
bbf7ce11
RAE
216called_as_built_in (tree node)
217{
218 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
219 we want the name used to call the function, not the name it
220 will have. */
221 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
222 return is_builtin_name (name);
223}
224
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;	/* Nothing useful is known about label alignment.  */
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
382
b0f4a35f
RG
383/* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in alignp and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
387
388bool
389get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391{
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393}
394
0eb77834 395/* Return the alignment in bits of EXP, an object. */
daade206
RG
396
397unsigned int
0eb77834 398get_object_alignment (tree exp)
daade206
RG
399{
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
644ffefd 403 get_object_alignment_1 (exp, &align, &bitpos);
daade206 404
e80c2726
RG
405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
146ec50f 409 align = least_bit_hwi (bitpos);
0eb77834 410 return align;
df96b059
JJ
411}
412
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* Taking an address: defer to the object-alignment machinery with
       ADDR_P true since the access may never happen.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* Pointer arithmetic: recurse on the base, then fold the addend
	 into the misalignment (constant) or cap the alignment by the
	 addend's known trailing zero bits (variable).  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* SSA names carry pointer alignment info computed by the CCP pass.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A constant address is as aligned as its low bits say it is.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: the conservative answer.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
487
87c0fb4b
RG
488/* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495unsigned int
496get_pointer_alignment (tree exp)
497{
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
644ffefd
MJ
500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
87c0fb4b
RG
502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
146ec50f 507 align = least_bit_hwi (bitpos);
87c0fb4b
RG
508
509 return align;
510}
511
bfb9bd47 512/* Return the number of leading non-zero elements in the sequence
1eb4547b
MS
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515
bfb9bd47 516unsigned
1eb4547b
MS
517string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518{
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543}
544
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, unsigned eltsize)
{
  gcc_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  /* A conditional with two arms of equal known length has that length;
     only valid when the condition's side effects need not be preserved.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* A comma expression's string is its second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  src = string_constant (src, &byteoff, &memsize);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The offset into the string is not a compile-time constant.  */

      /* For empty strings the result should be zero.  */
      if (maxelts == 0)
	return ssize_int (0);

      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.  */
      if (len < strelts || len > maxelts)
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  return ssize_int (len);
}
696
807e902e 697/* Return a constant integer corresponding to target reading
bf06b5d8 698 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ab937357 699
57814e5e 700static rtx
095a2d76 701c_readstr (const char *str, scalar_int_mode mode)
57814e5e 702{
57814e5e
JJ
703 HOST_WIDE_INT ch;
704 unsigned int i, j;
807e902e 705 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
57814e5e 706
298e6adc 707 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
807e902e
KZ
708 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
709 / HOST_BITS_PER_WIDE_INT;
710
711 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
712 for (i = 0; i < len; i++)
713 tmp[i] = 0;
5906d013 714
57814e5e
JJ
715 ch = 1;
716 for (i = 0; i < GET_MODE_SIZE (mode); i++)
717 {
718 j = i;
719 if (WORDS_BIG_ENDIAN)
720 j = GET_MODE_SIZE (mode) - i - 1;
721 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
e046112d 722 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
57814e5e
JJ
723 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
724 j *= BITS_PER_UNIT;
5906d013 725
57814e5e
JJ
726 if (ch)
727 ch = (unsigned char) str[i];
807e902e 728 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
57814e5e 729 }
807e902e
KZ
730
731 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
732 return immed_wide_int_const (c, mode);
57814e5e
JJ
733}
734
ab937357 735/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 736 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
737 P. */
738
739static int
4682ae04 740target_char_cast (tree cst, char *p)
ab937357
JJ
741{
742 unsigned HOST_WIDE_INT val, hostval;
743
de77ab75 744 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
745 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
746 return 1;
747
807e902e 748 /* Do not care if it fits or not right here. */
de77ab75 749 val = TREE_INT_CST_LOW (cst);
807e902e 750
ab937357 751 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 752 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
753
754 hostval = val;
755 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 756 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
757
758 if (val != hostval)
759 return 1;
760
761 *p = hostval;
762 return 0;
763}
764
6de9cd9a
DN
765/* Similar to save_expr, but assumes that arbitrary code is not executed
766 in between the multiple evaluations. In particular, we assume that a
767 non-addressable local variable will not be modified. */
768
769static tree
770builtin_save_expr (tree exp)
771{
5cbf5c20
RG
772 if (TREE_CODE (exp) == SSA_NAME
773 || (TREE_ADDRESSABLE (exp) == 0
774 && (TREE_CODE (exp) == PARM_DECL
8813a647 775 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
776 return exp;
777
778 return save_expr (exp);
779}
780
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).

   FNDECL_CODE is BUILT_IN_RETURN_ADDRESS or BUILT_IN_FRAME_ADDRESS;
   COUNT is the number of frames to walk back from the current one.  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  /* Start from the target-provided initial frame address, if any.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  /* Let the target compute it when it knows where the address lives.  */
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one word above the frame
     address in the dynamic chain.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
848
/* Alias set used for setjmp buffer.  Lazily created by the setjmp/longjmp
   expanders below; -1 means it has not been allocated yet.  */
static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 851
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.

   BUF_ADDR is the address of the jmp_buf; the buffer layout is:
   word 0 = frame pointer, word 1 = receiver label, remaining words =
   machine-dependent stack save area.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  /* NOTE(review): the trailing comma below is a long-standing quirk — it
     joins this statement with the following set_mem_alias_set call into a
     single comma expression; the behavior is the same as a semicolon.  */
  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
}
28f4ec01 898
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Give the target a chance to emit its own receiver code; fall back to
     the generic nonlocal-goto receiver, or nothing at all.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
28f4ec01 979
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the jmp_buf address; VALUE must be const1_rtx (see below).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup: word 0 is
	 the frame pointer, word 1 the receiver label, the rest the stack
	 save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      /* The expansion above must have emitted a jump before LAST.  */
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1064
862d0b35
DN
1065static inline bool
1066more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1067{
1068 return (iter->i < iter->n);
1069}
1070
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   CALLEXPR is the CALL_EXPR to check; the variadic arguments are the
   expected tree codes, terminated by 0 or VOID_TYPE as described above.
   Returns true when every actual argument matches (and nonnull-attributed
   pointer arguments are not literal null).  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      /* va_arg promotes enums to int, so read an int and cast back.  */
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
1140
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   Returns const0_rtx on success, or NULL_RTX if the argument list does
   not match (pointer, pointer).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer first, then the stack pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1221
2b92e7f5
RK
1222/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1223 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
1224 It updates the stack pointer in that block to the current value. This is
1225 also called directly by the SJLJ exception handling code. */
2b92e7f5 1226
d33606c3 1227void
2b92e7f5
RK
1228expand_builtin_update_setjmp_buf (rtx buf_addr)
1229{
ef4bddc2 1230 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 1231 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 1232 rtx stack_save
2b92e7f5
RK
1233 = gen_rtx_MEM (sa_mode,
1234 memory_address
1235 (sa_mode,
0a81f074
RS
1236 plus_constant (Pmode, buf_addr,
1237 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 1238
9eac0f2a 1239 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
1240}
1241
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.

   EXP is the CALL_EXPR; argument 0 is the address, optional argument 1 is
   the read/write flag (0 or 1), optional argument 2 the locality (0-3).  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  /* Emit the target's prefetch insn when it has one.  */
  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1319
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Alias set 0 conflicts with everything, matching the blob semantics.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1374\f
/* Built-in functions to perform an untyped call and return.  */

/* Per-register mode tables filled in by apply_args_size and
   apply_result_size below (VOIDmode for unused registers); stored in
   this_target_builtins so they follow the active target.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
28f4ec01 1381
28f4ec01
BS
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  /* Cached: the layout never changes for a given target.  */
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Keep each register's slot naturally aligned.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1423
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  /* Cached: the layout never changes for a given target.  */
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Keep each register's slot naturally aligned.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1462
28f4ec01
BS
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs, one per value-returning hard register,
   moving between the register and its aligned slot in RESULT.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Slot layout must match apply_result_size exactly.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1491
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Returns a register holding the address of a stack block laid out as:
   arg pointer, optional structure value address, then every incoming
   argument register (layout matching apply_args_size).  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1552
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Build the register-saving code in its own sequence so it can be
       emitted at the start of the function rather than here.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1596
1597/* Perform an untyped call and save the state required to perform an
1598 untyped return of whatever value was returned by the given function. */
1599
static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned to the mode's alignment; this must mirror
	   the layout produced when the block was saved.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1758
1759/* Perform an untyped return. */
1760
static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Recompute the layout of the saved-result block so the offsets
     below match what expand_builtin_apply stored.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target has a dedicated untyped_return pattern, let it do
     everything at once.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE for each restored register so the restores
	   are not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1807
ad82abb8 1808/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1809
ad82abb8 1810static enum type_class
4682ae04 1811type_to_class (tree type)
ad82abb8
ZW
1812{
1813 switch (TREE_CODE (type))
1814 {
1815 case VOID_TYPE: return void_type_class;
1816 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1817 case ENUMERAL_TYPE: return enumeral_type_class;
1818 case BOOLEAN_TYPE: return boolean_type_class;
1819 case POINTER_TYPE: return pointer_type_class;
1820 case REFERENCE_TYPE: return reference_type_class;
1821 case OFFSET_TYPE: return offset_type_class;
1822 case REAL_TYPE: return real_type_class;
1823 case COMPLEX_TYPE: return complex_type_class;
1824 case FUNCTION_TYPE: return function_type_class;
1825 case METHOD_TYPE: return method_type_class;
1826 case RECORD_TYPE: return record_type_class;
1827 case UNION_TYPE:
1828 case QUAL_UNION_TYPE: return union_type_class;
1829 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1830 ? string_type_class : array_type_class);
ad82abb8
ZW
1831 case LANG_TYPE: return lang_type_class;
1832 default: return no_type_class;
1833 }
1834}
8d51ecf8 1835
5039610b 1836/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1837
28f4ec01 1838static rtx
5039610b 1839expand_builtin_classify_type (tree exp)
28f4ec01 1840{
5039610b
SL
1841 if (call_expr_nargs (exp))
1842 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1843 return GEN_INT (no_type_class);
1844}
1845
ee5fd23a
MM
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  The macros expand to case labels plus assignments
   to the local fcode* variables declared in mathfn_built_in_2.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix (the reentrant
   variants such as lgamma_r have no _Float<N> counterparts).  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 1873
5c1a2e63
RS
1874/* Return a function equivalent to FN but operating on floating-point
1875 values of type TYPE, or END_BUILTINS if no such function exists.
b03ff92e
RS
1876 This is purely an operation on function codes; it does not guarantee
1877 that the target actually has an implementation of the function. */
05f41289 1878
static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* The _Float<N>/_Float<N>X codes default to END_BUILTINS so that
     functions without those variants return "no such builtin".  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  /* Each CASE_MATHFN* entry fills in the fcode* variables for one
     math-function family; see the macro definitions above.  */
  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Select the variant that matches TYPE's main variant.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
2010
2011/* Return mathematic function equivalent to FN but operating directly on TYPE,
2012 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2013 otherwise use the explicit declaration. If we can't do the conversion,
2014 return null. */
2015
2016static tree
b03ff92e 2017mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2018{
2019 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2020 if (fcode2 == END_BUILTINS)
5039610b 2021 return NULL_TREE;
e79983f4
MM
2022
2023 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2024 return NULL_TREE;
2025
2026 return builtin_decl_explicit (fcode2);
272f51a3
JH
2027}
2028
b03ff92e 2029/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2030
2031tree
b03ff92e 2032mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2033{
2034 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2035}
2036
b03ff92e
RS
2037/* Like mathfn_built_in_1, but take a built_in_function and
2038 always use the implicit array. */
2039
2040tree
2041mathfn_built_in (tree type, enum built_in_function fn)
2042{
2043 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2044}
2045
686ee971
RS
2046/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2047 return its code, otherwise return IFN_LAST. Note that this function
2048 only tests whether the function is defined in internals.def, not whether
2049 it is actually available on the target. */
2050
internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
/* Generate one case per directly-mapped internal function by expanding
   internal-fn.def with these temporary macro definitions.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* A few builtins map onto an internal function with a different
       name.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is ldexp only when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2083
2084/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2085 on the current target by a call to an internal function, return the
2086 code of that internal function, otherwise return IFN_LAST. The caller
2087 is responsible for ensuring that any side-effects of the built-in
2088 call are dealt with correctly. E.g. if CALL sets errno, the caller
2089 must decide that the errno result isn't needed or make it available
2090 in some other way. */
2091
2092internal_fn
2093replacement_internal_fn (gcall *call)
2094{
2095 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2096 {
2097 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2098 if (ifn != IFN_LAST)
2099 {
2100 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2101 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2102 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2103 return ifn;
2104 }
2105 }
2106 return IFN_LAST;
2107}
2108
1b1562a5
MM
2109/* Expand a call to the builtin trinary math functions (fma).
2110 Return NULL_RTX if a normal call should be emitted rather than expanding the
2111 function in-line. EXP is the expression that is a call to the builtin
2112 function; if convenient, the result should be placed in TARGET.
2113 SUBTARGET may be used as the target for computing one of EXP's
2114 operands. */
2115
static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so a library fallback re-expands
     the same (side-effect-free) operands.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2183
6c7cf1f0 2184/* Expand a call to the builtin sin and cos math functions.
5039610b 2185 Return NULL_RTX if a normal call should be emitted rather than expanding the
6c7cf1f0
UB
2186 function in-line. EXP is the expression that is a call to the builtin
2187 function; if convenient, the result should be placed in TARGET.
2188 SUBTARGET may be used as the target for computing one of EXP's
2189 operands. */
2190
static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2284
44e10129
MM
2285/* Given an interclass math builtin decl FNDECL and it's argument ARG
2286 return an RTL instruction code that implements the functionality.
2287 If that isn't possible or available return CODE_FOR_nothing. */
eaee4464 2288
44e10129
MM
static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2328
2329/* Expand a call to one of the builtin math functions that operate on
903c723b
TC
2330 floating point argument and output an integer result (ilogb, isinf,
2331 isnan, etc).
44e10129
MM
2332 Return 0 if a normal call should be emitted rather than expanding the
2333 function in-line. EXP is the expression that is a call to the builtin
4359dc2a 2334 function; if convenient, the result should be placed in TARGET. */
44e10129
MM
2335
static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Emission failed: roll back the insns and the argument rewrite
	 so the caller can fall back to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2379
403e54f0 2380/* Expand a call to the builtin sincos math function.
5039610b 2381 Return NULL_RTX if a normal call should be emitted rather than expanding the
403e54f0
RG
2382 function in-line. EXP is the expression that is a call to the builtin
2383 function. */
2384
static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs through the sin/cos output pointers so the stores
     below get the right alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2433
75c7c595
RG
2434/* Expand a call to the internal cexpi builtin to the sincos math function.
2435 EXP is the expression that is a call to the builtin function; if convenient,
4359dc2a 2436 the result should be placed in TARGET. */
75c7c595
RG
2437
2438static rtx
4359dc2a 2439expand_builtin_cexpi (tree exp, rtx target)
75c7c595
RG
2440{
2441 tree fndecl = get_callee_fndecl (exp);
75c7c595 2442 tree arg, type;
ef4bddc2 2443 machine_mode mode;
75c7c595 2444 rtx op0, op1, op2;
db3927fb 2445 location_t loc = EXPR_LOCATION (exp);
75c7c595 2446
5039610b
SL
2447 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2448 return NULL_RTX;
75c7c595 2449
5039610b 2450 arg = CALL_EXPR_ARG (exp, 0);
75c7c595
RG
2451 type = TREE_TYPE (arg);
2452 mode = TYPE_MODE (TREE_TYPE (arg));
2453
2454 /* Try expanding via a sincos optab, fall back to emitting a libcall
b54c5497
RG
2455 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2456 is only generated from sincos, cexp or if we have either of them. */
947131ba 2457 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
75c7c595
RG
2458 {
2459 op1 = gen_reg_rtx (mode);
2460 op2 = gen_reg_rtx (mode);
2461
4359dc2a 2462 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
75c7c595
RG
2463
2464 /* Compute into op1 and op2. */
2465 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2466 }
d33d9e47 2467 else if (targetm.libc_has_function (function_sincos))
75c7c595 2468 {
5039610b 2469 tree call, fn = NULL_TREE;
75c7c595
RG
2470 tree top1, top2;
2471 rtx op1a, op2a;
2472
2473 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
e79983f4 2474 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
75c7c595 2475 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
e79983f4 2476 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
75c7c595 2477 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
e79983f4 2478 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
5039610b
SL
2479 else
2480 gcc_unreachable ();
b8698a0f 2481
9474e8ab
MM
2482 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2483 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
18ae1560
UB
2484 op1a = copy_addr_to_reg (XEXP (op1, 0));
2485 op2a = copy_addr_to_reg (XEXP (op2, 0));
75c7c595
RG
2486 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2487 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2488
75c7c595
RG
2489 /* Make sure not to fold the sincos call again. */
2490 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5039610b
SL
2491 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2492 call, 3, arg, top1, top2));
75c7c595 2493 }
b54c5497
RG
2494 else
2495 {
9d972b2d 2496 tree call, fn = NULL_TREE, narg;
b54c5497
RG
2497 tree ctype = build_complex_type (type);
2498
9d972b2d 2499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
e79983f4 2500 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
9d972b2d 2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
e79983f4 2502 fn = builtin_decl_explicit (BUILT_IN_CEXP);
9d972b2d 2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
e79983f4 2504 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
5039610b
SL
2505 else
2506 gcc_unreachable ();
34a24c11
RG
2507
2508 /* If we don't have a decl for cexp create one. This is the
2509 friendliest fallback if the user calls __builtin_cexpi
2510 without full target C99 function support. */
2511 if (fn == NULL_TREE)
2512 {
2513 tree fntype;
2514 const char *name = NULL;
2515
2516 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2517 name = "cexpf";
2518 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2519 name = "cexp";
2520 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2521 name = "cexpl";
2522
2523 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2524 fn = build_fn_decl (name, fntype);
2525 }
2526
db3927fb 2527 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
b54c5497
RG
2528 build_real (type, dconst0), arg);
2529
2530 /* Make sure not to fold the cexp call again. */
2531 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
b8698a0f 2532 return expand_expr (build_call_nary (ctype, call, 1, narg),
49452c07 2533 target, VOIDmode, EXPAND_NORMAL);
b54c5497 2534 }
75c7c595
RG
2535
2536 /* Now build the proper return type. */
2537 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2538 make_tree (TREE_TYPE (arg), op2),
2539 make_tree (TREE_TYPE (arg), op1)),
49452c07 2540 target, VOIDmode, EXPAND_NORMAL);
75c7c595
RG
2541}
2542
44e10129
MM
2543/* Conveniently construct a function call expression. FNDECL names the
2544 function to be called, N is the number of arguments, and the "..."
2545 parameters are the argument expressions. Unlike build_call_exr
2546 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2547
2548static tree
2549build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2550{
2551 va_list ap;
2552 tree fntype = TREE_TYPE (fndecl);
2553 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2554
2555 va_start (ap, n);
2556 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2557 va_end (ap);
2558 SET_EXPR_LOCATION (fn, loc);
2559 return fn;
2560}
44e10129 2561
0bfa1541
RG
2562/* Expand a call to one of the builtin rounding functions gcc defines
2563 as an extension (lfloor and lceil). As these are gcc extensions we
2564 do not need to worry about setting errno to EDOM.
d8b42d06
UB
2565 If expanding via optab fails, lower expression to (int)(floor(x)).
2566 EXP is the expression that is a call to the builtin function;
1856c8dc 2567 if convenient, the result should be placed in TARGET. */
d8b42d06
UB
2568
2569static rtx
1856c8dc 2570expand_builtin_int_roundingfn (tree exp, rtx target)
d8b42d06 2571{
c3a4177f 2572 convert_optab builtin_optab;
58f4cf2a
DM
2573 rtx op0, tmp;
2574 rtx_insn *insns;
d8b42d06 2575 tree fndecl = get_callee_fndecl (exp);
d8b42d06
UB
2576 enum built_in_function fallback_fn;
2577 tree fallback_fndecl;
ef4bddc2 2578 machine_mode mode;
968fc3b6 2579 tree arg;
d8b42d06 2580
5039610b 2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
d8b42d06
UB
2582 gcc_unreachable ();
2583
5039610b 2584 arg = CALL_EXPR_ARG (exp, 0);
d8b42d06
UB
2585
2586 switch (DECL_FUNCTION_CODE (fndecl))
2587 {
6c32ee74 2588 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
2589 CASE_FLT_FN (BUILT_IN_LCEIL):
2590 CASE_FLT_FN (BUILT_IN_LLCEIL):
f94b1661
UB
2591 builtin_optab = lceil_optab;
2592 fallback_fn = BUILT_IN_CEIL;
2593 break;
2594
6c32ee74 2595 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627
VR
2596 CASE_FLT_FN (BUILT_IN_LFLOOR):
2597 CASE_FLT_FN (BUILT_IN_LLFLOOR):
d8b42d06
UB
2598 builtin_optab = lfloor_optab;
2599 fallback_fn = BUILT_IN_FLOOR;
2600 break;
2601
2602 default:
2603 gcc_unreachable ();
2604 }
2605
2606 /* Make a suitable register to place result in. */
2607 mode = TYPE_MODE (TREE_TYPE (exp));
2608
c3a4177f 2609 target = gen_reg_rtx (mode);
d8b42d06 2610
c3a4177f
RG
2611 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2612 need to expand the argument again. This way, we will not perform
2613 side-effects more the once. */
5799f732 2614 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
d8b42d06 2615
1856c8dc 2616 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
d8b42d06 2617
c3a4177f 2618 start_sequence ();
d8b42d06 2619
c3a4177f
RG
2620 /* Compute into TARGET. */
2621 if (expand_sfix_optab (target, op0, builtin_optab))
2622 {
2623 /* Output the entire sequence. */
2624 insns = get_insns ();
d8b42d06 2625 end_sequence ();
c3a4177f
RG
2626 emit_insn (insns);
2627 return target;
d8b42d06
UB
2628 }
2629
c3a4177f
RG
2630 /* If we were unable to expand via the builtin, stop the sequence
2631 (without outputting the insns). */
2632 end_sequence ();
2633
d8b42d06
UB
2634 /* Fall back to floating point rounding optab. */
2635 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
34a24c11
RG
2636
2637 /* For non-C99 targets we may end up without a fallback fndecl here
2638 if the user called __builtin_lfloor directly. In this case emit
2639 a call to the floor/ceil variants nevertheless. This should result
2640 in the best user experience for not full C99 targets. */
2641 if (fallback_fndecl == NULL_TREE)
2642 {
2643 tree fntype;
2644 const char *name = NULL;
2645
2646 switch (DECL_FUNCTION_CODE (fndecl))
2647 {
6c32ee74 2648 case BUILT_IN_ICEIL:
34a24c11
RG
2649 case BUILT_IN_LCEIL:
2650 case BUILT_IN_LLCEIL:
2651 name = "ceil";
2652 break;
6c32ee74 2653 case BUILT_IN_ICEILF:
34a24c11
RG
2654 case BUILT_IN_LCEILF:
2655 case BUILT_IN_LLCEILF:
2656 name = "ceilf";
2657 break;
6c32ee74 2658 case BUILT_IN_ICEILL:
34a24c11
RG
2659 case BUILT_IN_LCEILL:
2660 case BUILT_IN_LLCEILL:
2661 name = "ceill";
2662 break;
6c32ee74 2663 case BUILT_IN_IFLOOR:
34a24c11
RG
2664 case BUILT_IN_LFLOOR:
2665 case BUILT_IN_LLFLOOR:
2666 name = "floor";
2667 break;
6c32ee74 2668 case BUILT_IN_IFLOORF:
34a24c11
RG
2669 case BUILT_IN_LFLOORF:
2670 case BUILT_IN_LLFLOORF:
2671 name = "floorf";
2672 break;
6c32ee74 2673 case BUILT_IN_IFLOORL:
34a24c11
RG
2674 case BUILT_IN_LFLOORL:
2675 case BUILT_IN_LLFLOORL:
2676 name = "floorl";
2677 break;
2678 default:
2679 gcc_unreachable ();
2680 }
2681
2682 fntype = build_function_type_list (TREE_TYPE (arg),
2683 TREE_TYPE (arg), NULL_TREE);
2684 fallback_fndecl = build_fn_decl (name, fntype);
2685 }
2686
aa493694 2687 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
d8b42d06 2688
39b1ec97 2689 tmp = expand_normal (exp);
9a002da8 2690 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
d8b42d06
UB
2691
2692 /* Truncate the result of floating point optab to integer
2693 via expand_fix (). */
2694 target = gen_reg_rtx (mode);
2695 expand_fix (target, tmp, 0);
2696
2697 return target;
2698}
2699
0bfa1541
RG
2700/* Expand a call to one of the builtin math functions doing integer
2701 conversion (lrint).
2702 Return 0 if a normal call should be emitted rather than expanding the
2703 function in-line. EXP is the expression that is a call to the builtin
1856c8dc 2704 function; if convenient, the result should be placed in TARGET. */
0bfa1541
RG
2705
2706static rtx
1856c8dc 2707expand_builtin_int_roundingfn_2 (tree exp, rtx target)
0bfa1541 2708{
bb7f0423 2709 convert_optab builtin_optab;
58f4cf2a
DM
2710 rtx op0;
2711 rtx_insn *insns;
0bfa1541 2712 tree fndecl = get_callee_fndecl (exp);
968fc3b6 2713 tree arg;
ef4bddc2 2714 machine_mode mode;
ff63ac4d 2715 enum built_in_function fallback_fn = BUILT_IN_NONE;
0bfa1541 2716
5039610b
SL
2717 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2718 gcc_unreachable ();
b8698a0f 2719
5039610b 2720 arg = CALL_EXPR_ARG (exp, 0);
0bfa1541
RG
2721
2722 switch (DECL_FUNCTION_CODE (fndecl))
2723 {
6c32ee74 2724 CASE_FLT_FN (BUILT_IN_IRINT):
ff63ac4d 2725 fallback_fn = BUILT_IN_LRINT;
81fea426 2726 gcc_fallthrough ();
0bfa1541
RG
2727 CASE_FLT_FN (BUILT_IN_LRINT):
2728 CASE_FLT_FN (BUILT_IN_LLRINT):
ff63ac4d
JJ
2729 builtin_optab = lrint_optab;
2730 break;
6c32ee74
UB
2731
2732 CASE_FLT_FN (BUILT_IN_IROUND):
ff63ac4d 2733 fallback_fn = BUILT_IN_LROUND;
81fea426 2734 gcc_fallthrough ();
4d81bf84
RG
2735 CASE_FLT_FN (BUILT_IN_LROUND):
2736 CASE_FLT_FN (BUILT_IN_LLROUND):
ff63ac4d
JJ
2737 builtin_optab = lround_optab;
2738 break;
6c32ee74 2739
0bfa1541
RG
2740 default:
2741 gcc_unreachable ();
2742 }
2743
ff63ac4d
JJ
2744 /* There's no easy way to detect the case we need to set EDOM. */
2745 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2746 return NULL_RTX;
2747
0bfa1541
RG
2748 /* Make a suitable register to place result in. */
2749 mode = TYPE_MODE (TREE_TYPE (exp));
2750
ff63ac4d
JJ
2751 /* There's no easy way to detect the case we need to set EDOM. */
2752 if (!flag_errno_math)
2753 {
04b80dbb 2754 rtx result = gen_reg_rtx (mode);
0bfa1541 2755
ff63ac4d
JJ
2756 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2757 need to expand the argument again. This way, we will not perform
2758 side-effects more the once. */
2759 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
0bfa1541 2760
ff63ac4d 2761 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
0bfa1541 2762
ff63ac4d 2763 start_sequence ();
0bfa1541 2764
04b80dbb 2765 if (expand_sfix_optab (result, op0, builtin_optab))
ff63ac4d
JJ
2766 {
2767 /* Output the entire sequence. */
2768 insns = get_insns ();
2769 end_sequence ();
2770 emit_insn (insns);
04b80dbb 2771 return result;
ff63ac4d
JJ
2772 }
2773
2774 /* If we were unable to expand via the builtin, stop the sequence
2775 (without outputting the insns) and call to the library function
2776 with the stabilized argument list. */
0bfa1541
RG
2777 end_sequence ();
2778 }
2779
ff63ac4d
JJ
2780 if (fallback_fn != BUILT_IN_NONE)
2781 {
2782 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2783 targets, (int) round (x) should never be transformed into
2784 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2785 a call to lround in the hope that the target provides at least some
2786 C99 functions. This should result in the best user experience for
2787 not full C99 targets. */
b03ff92e
RS
2788 tree fallback_fndecl = mathfn_built_in_1
2789 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
ff63ac4d
JJ
2790
2791 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2792 fallback_fndecl, 1, arg);
2793
2794 target = expand_call (exp, NULL_RTX, target == const0_rtx);
9a002da8 2795 target = maybe_emit_group_store (target, TREE_TYPE (exp));
ff63ac4d
JJ
2796 return convert_to_mode (mode, target, 0);
2797 }
bb7f0423 2798
04b80dbb 2799 return expand_call (exp, target, target == const0_rtx);
0bfa1541
RG
2800}
2801
5039610b 2802/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
2803 a normal call should be emitted rather than expanding the function
2804 in-line. EXP is the expression that is a call to the builtin
2805 function; if convenient, the result should be placed in TARGET. */
2806
2807static rtx
4359dc2a 2808expand_builtin_powi (tree exp, rtx target)
17684d46 2809{
17684d46
RG
2810 tree arg0, arg1;
2811 rtx op0, op1;
ef4bddc2
RS
2812 machine_mode mode;
2813 machine_mode mode2;
17684d46 2814
5039610b
SL
2815 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2816 return NULL_RTX;
17684d46 2817
5039610b
SL
2818 arg0 = CALL_EXPR_ARG (exp, 0);
2819 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
2820 mode = TYPE_MODE (TREE_TYPE (exp));
2821
17684d46
RG
2822 /* Emit a libcall to libgcc. */
2823
5039610b 2824 /* Mode of the 2nd argument must match that of an int. */
f4b31647 2825 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 2826
17684d46
RG
2827 if (target == NULL_RTX)
2828 target = gen_reg_rtx (mode);
2829
4359dc2a 2830 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
2831 if (GET_MODE (op0) != mode)
2832 op0 = convert_to_mode (mode, op0, 0);
49452c07 2833 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
2834 if (GET_MODE (op1) != mode2)
2835 op1 = convert_to_mode (mode2, op1, 0);
17684d46 2836
8a33f100 2837 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 2838 target, LCT_CONST, mode,
0b8495ae 2839 op0, mode, op1, mode2);
17684d46
RG
2840
2841 return target;
2842}
2843
b8698a0f 2844/* Expand expression EXP which is a call to the strlen builtin. Return
781ff3d8 2845 NULL_RTX if we failed and the caller should emit a normal call, otherwise
0e9295cf 2846 try to get the result in TARGET, if convenient. */
3bdf5ad1 2847
28f4ec01 2848static rtx
5039610b 2849expand_builtin_strlen (tree exp, rtx target,
ef4bddc2 2850 machine_mode target_mode)
28f4ec01 2851{
5039610b
SL
2852 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2853 return NULL_RTX;
712b7a05 2854
16155777
MS
2855 struct expand_operand ops[4];
2856 rtx pat;
2857 tree len;
2858 tree src = CALL_EXPR_ARG (exp, 0);
2859 rtx src_reg;
2860 rtx_insn *before_strlen;
2861 machine_mode insn_mode;
2862 enum insn_code icode = CODE_FOR_nothing;
2863 unsigned int align;
ae808627 2864
16155777
MS
2865 /* If the length can be computed at compile-time, return it. */
2866 len = c_strlen (src, 0);
2867 if (len)
2868 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2869
2870 /* If the length can be computed at compile-time and is constant
2871 integer, but there are side-effects in src, evaluate
2872 src for side-effects, then return len.
2873 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2874 can be optimized into: i++; x = 3; */
2875 len = c_strlen (src, 1);
2876 if (len && TREE_CODE (len) == INTEGER_CST)
2877 {
2878 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2879 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2880 }
28f4ec01 2881
16155777 2882 align = get_pointer_alignment (src) / BITS_PER_UNIT;
28f4ec01 2883
16155777
MS
2884 /* If SRC is not a pointer type, don't do this operation inline. */
2885 if (align == 0)
2886 return NULL_RTX;
2887
2888 /* Bail out if we can't compute strlen in the right mode. */
2889 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2890 {
2891 icode = optab_handler (strlen_optab, insn_mode);
2892 if (icode != CODE_FOR_nothing)
2893 break;
2894 }
2895 if (insn_mode == VOIDmode)
2896 return NULL_RTX;
28f4ec01 2897
16155777
MS
2898 /* Make a place to hold the source address. We will not expand
2899 the actual source until we are sure that the expansion will
2900 not fail -- there are trees that cannot be expanded twice. */
2901 src_reg = gen_reg_rtx (Pmode);
28f4ec01 2902
16155777
MS
2903 /* Mark the beginning of the strlen sequence so we can emit the
2904 source operand later. */
2905 before_strlen = get_last_insn ();
28f4ec01 2906
16155777
MS
2907 create_output_operand (&ops[0], target, insn_mode);
2908 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2909 create_integer_operand (&ops[2], 0);
2910 create_integer_operand (&ops[3], align);
2911 if (!maybe_expand_insn (icode, 4, ops))
2912 return NULL_RTX;
dd05e4fa 2913
16155777
MS
2914 /* Check to see if the argument was declared attribute nonstring
2915 and if so, issue a warning since at this point it's not known
2916 to be nul-terminated. */
2917 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6a33d0ff 2918
16155777
MS
2919 /* Now that we are assured of success, expand the source. */
2920 start_sequence ();
2921 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2922 if (pat != src_reg)
2923 {
fa465762 2924#ifdef POINTERS_EXTEND_UNSIGNED
16155777
MS
2925 if (GET_MODE (pat) != Pmode)
2926 pat = convert_to_mode (Pmode, pat,
2927 POINTERS_EXTEND_UNSIGNED);
fa465762 2928#endif
16155777
MS
2929 emit_move_insn (src_reg, pat);
2930 }
2931 pat = get_insns ();
2932 end_sequence ();
fca9f642 2933
16155777
MS
2934 if (before_strlen)
2935 emit_insn_after (pat, before_strlen);
2936 else
2937 emit_insn_before (pat, get_insns ());
28f4ec01 2938
16155777
MS
2939 /* Return the value in the proper mode for this function. */
2940 if (GET_MODE (ops[0].value) == target_mode)
2941 target = ops[0].value;
2942 else if (target != 0)
2943 convert_move (target, ops[0].value, 0);
2944 else
2945 target = convert_to_mode (target_mode, ops[0].value, 0);
dd05e4fa 2946
16155777 2947 return target;
28f4ec01
BS
2948}
2949
781ff3d8
MS
2950/* Expand call EXP to the strnlen built-in, returning the result
2951 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2952
2953static rtx
2954expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2955{
2956 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2957 return NULL_RTX;
2958
2959 tree src = CALL_EXPR_ARG (exp, 0);
2960 tree bound = CALL_EXPR_ARG (exp, 1);
2961
2962 if (!bound)
2963 return NULL_RTX;
2964
2965 location_t loc = UNKNOWN_LOCATION;
2966 if (EXPR_HAS_LOCATION (exp))
2967 loc = EXPR_LOCATION (exp);
2968
2969 tree maxobjsize = max_object_size ();
2970 tree func = get_callee_fndecl (exp);
2971
2972 tree len = c_strlen (src, 0);
2973
2974 if (TREE_CODE (bound) == INTEGER_CST)
2975 {
2976 if (!TREE_NO_WARNING (exp)
2977 && tree_int_cst_lt (maxobjsize, bound)
2978 && warning_at (loc, OPT_Wstringop_overflow_,
2979 "%K%qD specified bound %E "
2980 "exceeds maximum object size %E",
2981 exp, func, bound, maxobjsize))
2982 TREE_NO_WARNING (exp) = true;
2983
2984 if (!len || TREE_CODE (len) != INTEGER_CST)
2985 return NULL_RTX;
2986
2987 len = fold_convert_loc (loc, size_type_node, len);
2988 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2989 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2990 }
2991
2992 if (TREE_CODE (bound) != SSA_NAME)
2993 return NULL_RTX;
2994
2995 wide_int min, max;
2996 enum value_range_type rng = get_range_info (bound, &min, &max);
2997 if (rng != VR_RANGE)
2998 return NULL_RTX;
2999
3000 if (!TREE_NO_WARNING (exp)
3001 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3002 && warning_at (loc, OPT_Wstringop_overflow_,
3003 "%K%qD specified bound [%wu, %wu] "
3004 "exceeds maximum object size %E",
3005 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3006 TREE_NO_WARNING (exp) = true;
3007
3008 if (!len || TREE_CODE (len) != INTEGER_CST)
3009 return NULL_RTX;
3010
3011 if (wi::gtu_p (min, wi::to_wide (len)))
3012 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3013
3014 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016}
3017
57814e5e
JJ
3018/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3019 bytes from constant string DATA + OFFSET and return it as target
3020 constant. */
3021
3022static rtx
4682ae04 3023builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3024 scalar_int_mode mode)
57814e5e
JJ
3025{
3026 const char *str = (const char *) data;
3027
298e6adc
NS
3028 gcc_assert (offset >= 0
3029 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3030 <= strlen (str) + 1));
57814e5e
JJ
3031
3032 return c_readstr (str + offset, mode);
3033}
3034
3918b108 3035/* LEN specify length of the block of memcpy/memset operation.
82bb7d4e
JH
3036 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3037 In some cases we can make very likely guess on max size, then we
3038 set it into PROBABLE_MAX_SIZE. */
3918b108
JH
3039
3040static void
3041determine_block_size (tree len, rtx len_rtx,
3042 unsigned HOST_WIDE_INT *min_size,
82bb7d4e
JH
3043 unsigned HOST_WIDE_INT *max_size,
3044 unsigned HOST_WIDE_INT *probable_max_size)
3918b108
JH
3045{
3046 if (CONST_INT_P (len_rtx))
3047 {
2738b4c7 3048 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3918b108
JH
3049 return;
3050 }
3051 else
3052 {
807e902e 3053 wide_int min, max;
82bb7d4e
JH
3054 enum value_range_type range_type = VR_UNDEFINED;
3055
3056 /* Determine bounds from the type. */
3057 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3058 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3059 else
3060 *min_size = 0;
3061 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2738b4c7
JJ
3062 *probable_max_size = *max_size
3063 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
82bb7d4e
JH
3064 else
3065 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3066
3067 if (TREE_CODE (len) == SSA_NAME)
3068 range_type = get_range_info (len, &min, &max);
3069 if (range_type == VR_RANGE)
3918b108 3070 {
807e902e 3071 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3918b108 3072 *min_size = min.to_uhwi ();
807e902e 3073 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
82bb7d4e 3074 *probable_max_size = *max_size = max.to_uhwi ();
3918b108 3075 }
82bb7d4e 3076 else if (range_type == VR_ANTI_RANGE)
3918b108 3077 {
70ec86ee 3078 /* Anti range 0...N lets us to determine minimal size to N+1. */
807e902e 3079 if (min == 0)
82bb7d4e 3080 {
807e902e
KZ
3081 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3082 *min_size = max.to_uhwi () + 1;
82bb7d4e
JH
3083 }
3084 /* Code like
3085
3086 int n;
3087 if (n < 100)
70ec86ee 3088 memcpy (a, b, n)
82bb7d4e
JH
3089
3090 Produce anti range allowing negative values of N. We still
3091 can use the information and make a guess that N is not negative.
3092 */
807e902e
KZ
3093 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3094 *probable_max_size = min.to_uhwi () - 1;
3918b108
JH
3095 }
3096 }
3097 gcc_checking_assert (*max_size <=
3098 (unsigned HOST_WIDE_INT)
3099 GET_MODE_MASK (GET_MODE (len_rtx)));
3100}
3101
ee92e7ba
MS
3102/* Try to verify that the sizes and lengths of the arguments to a string
3103 manipulation function given by EXP are within valid bounds and that
cc8bea0a
MS
3104 the operation does not lead to buffer overflow or read past the end.
3105 Arguments other than EXP may be null. When non-null, the arguments
3106 have the following meaning:
3107 DST is the destination of a copy call or NULL otherwise.
3108 SRC is the source of a copy call or NULL otherwise.
3109 DSTWRITE is the number of bytes written into the destination obtained
3110 from the user-supplied size argument to the function (such as in
3111 memcpy(DST, SRCs, DSTWRITE) or strncpy(DST, DRC, DSTWRITE).
3112 MAXREAD is the user-supplied bound on the length of the source sequence
ee92e7ba 3113 (such as in strncat(d, s, N). It specifies the upper limit on the number
cc8bea0a
MS
3114 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3115 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3116 expression EXP is a string function call (as opposed to a memory call
3117 like memcpy). As an exception, SRCSTR can also be an integer denoting
3118 the precomputed size of the source string or object (for functions like
3119 memcpy).
3120 DSTSIZE is the size of the destination object specified by the last
ee92e7ba 3121 argument to the _chk builtins, typically resulting from the expansion
cc8bea0a
MS
3122 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3123 DSTSIZE).
ee92e7ba 3124
cc8bea0a 3125 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
ee92e7ba
MS
3126 SIZE_MAX.
3127
cc8bea0a
MS
3128 If the call is successfully verified as safe return true, otherwise
3129 return false. */
ee92e7ba
MS
3130
3131static bool
cc8bea0a
MS
3132check_access (tree exp, tree, tree, tree dstwrite,
3133 tree maxread, tree srcstr, tree dstsize)
ee92e7ba 3134{
cc8bea0a
MS
3135 int opt = OPT_Wstringop_overflow_;
3136
ee92e7ba 3137 /* The size of the largest object is half the address space, or
cc8bea0a
MS
3138 PTRDIFF_MAX. (This is way too permissive.) */
3139 tree maxobjsize = max_object_size ();
ee92e7ba 3140
cc8bea0a
MS
3141 /* Either the length of the source string for string functions or
3142 the size of the source object for raw memory functions. */
ee92e7ba
MS
3143 tree slen = NULL_TREE;
3144
d9c5a8b9
MS
3145 tree range[2] = { NULL_TREE, NULL_TREE };
3146
ee92e7ba
MS
3147 /* Set to true when the exact number of bytes written by a string
3148 function like strcpy is not known and the only thing that is
3149 known is that it must be at least one (for the terminating nul). */
3150 bool at_least_one = false;
cc8bea0a 3151 if (srcstr)
ee92e7ba 3152 {
cc8bea0a 3153 /* SRCSTR is normally a pointer to string but as a special case
ee92e7ba 3154 it can be an integer denoting the length of a string. */
cc8bea0a 3155 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
ee92e7ba
MS
3156 {
3157 /* Try to determine the range of lengths the source string
d9c5a8b9 3158 refers to. If it can be determined and is less than
cc8bea0a 3159 the upper bound given by MAXREAD add one to it for
ee92e7ba 3160 the terminating nul. Otherwise, set it to one for
cc8bea0a
MS
3161 the same reason, or to MAXREAD as appropriate. */
3162 get_range_strlen (srcstr, range);
3163 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
d9c5a8b9 3164 {
cc8bea0a
MS
3165 if (maxread && tree_int_cst_le (maxread, range[0]))
3166 range[0] = range[1] = maxread;
d9c5a8b9
MS
3167 else
3168 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3169 range[0], size_one_node);
3170
cc8bea0a
MS
3171 if (maxread && tree_int_cst_le (maxread, range[1]))
3172 range[1] = maxread;
d9c5a8b9
MS
3173 else if (!integer_all_onesp (range[1]))
3174 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3175 range[1], size_one_node);
3176
3177 slen = range[0];
3178 }
ee92e7ba
MS
3179 else
3180 {
3181 at_least_one = true;
3182 slen = size_one_node;
3183 }
3184 }
3185 else
cc8bea0a 3186 slen = srcstr;
ee92e7ba
MS
3187 }
3188
cc8bea0a 3189 if (!dstwrite && !maxread)
ee92e7ba
MS
3190 {
3191 /* When the only available piece of data is the object size
3192 there is nothing to do. */
3193 if (!slen)
3194 return true;
3195
3196 /* Otherwise, when the length of the source sequence is known
cc8bea0a 3197 (as with strlen), set DSTWRITE to it. */
d9c5a8b9 3198 if (!range[0])
cc8bea0a 3199 dstwrite = slen;
ee92e7ba
MS
3200 }
3201
cc8bea0a
MS
3202 if (!dstsize)
3203 dstsize = maxobjsize;
ee92e7ba 3204
cc8bea0a
MS
3205 if (dstwrite)
3206 get_size_range (dstwrite, range);
ee92e7ba 3207
cc8bea0a 3208 tree func = get_callee_fndecl (exp);
ee92e7ba
MS
3209
3210 /* First check the number of bytes to be written against the maximum
3211 object size. */
bfb9bd47
MS
3212 if (range[0]
3213 && TREE_CODE (range[0]) == INTEGER_CST
3214 && tree_int_cst_lt (maxobjsize, range[0]))
ee92e7ba 3215 {
781ff3d8
MS
3216 if (TREE_NO_WARNING (exp))
3217 return false;
3218
ee92e7ba 3219 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3220 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3221
781ff3d8 3222 bool warned;
ee92e7ba 3223 if (range[0] == range[1])
781ff3d8
MS
3224 warned = warning_at (loc, opt,
3225 "%K%qD specified size %E "
3226 "exceeds maximum object size %E",
3227 exp, func, range[0], maxobjsize);
3228 else
3229 warned = warning_at (loc, opt,
3230 "%K%qD specified size between %E and %E "
3231 "exceeds maximum object size %E",
3232 exp, func,
3233 range[0], range[1], maxobjsize);
3234 if (warned)
3235 TREE_NO_WARNING (exp) = true;
3236
ee92e7ba
MS
3237 return false;
3238 }
3239
cc8bea0a
MS
3240 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3241 constant, and in range of unsigned HOST_WIDE_INT. */
3242 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3243
ee92e7ba
MS
3244 /* Next check the number of bytes to be written against the destination
3245 object size. */
cc8bea0a 3246 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
ee92e7ba
MS
3247 {
3248 if (range[0]
bfb9bd47 3249 && TREE_CODE (range[0]) == INTEGER_CST
cc8bea0a
MS
3250 && ((tree_fits_uhwi_p (dstsize)
3251 && tree_int_cst_lt (dstsize, range[0]))
bfb9bd47
MS
3252 || (dstwrite
3253 && tree_fits_uhwi_p (dstwrite)
cc8bea0a 3254 && tree_int_cst_lt (dstwrite, range[0]))))
ee92e7ba 3255 {
e0676e2e
MS
3256 if (TREE_NO_WARNING (exp))
3257 return false;
3258
ee92e7ba 3259 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3260 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba 3261
cc8bea0a 3262 if (dstwrite == slen && at_least_one)
d9c5a8b9
MS
3263 {
3264 /* This is a call to strcpy with a destination of 0 size
3265 and a source of unknown length. The call will write
3266 at least one byte past the end of the destination. */
3267 warning_at (loc, opt,
13c5654f 3268 "%K%qD writing %E or more bytes into a region "
d9c5a8b9 3269 "of size %E overflows the destination",
cc8bea0a 3270 exp, func, range[0], dstsize);
d9c5a8b9
MS
3271 }
3272 else if (tree_int_cst_equal (range[0], range[1]))
457442eb
MS
3273 warning_n (loc, opt, tree_to_uhwi (range[0]),
3274 "%K%qD writing %E byte into a region "
3275 "of size %E overflows the destination",
3276 "%K%qD writing %E bytes into a region "
3277 "of size %E overflows the destination",
3278 exp, func, range[0], dstsize);
d9c5a8b9
MS
3279 else if (tree_int_cst_sign_bit (range[1]))
3280 {
3281 /* Avoid printing the upper bound if it's invalid. */
3282 warning_at (loc, opt,
13c5654f 3283 "%K%qD writing %E or more bytes into a region "
d9c5a8b9 3284 "of size %E overflows the destination",
cc8bea0a 3285 exp, func, range[0], dstsize);
d9c5a8b9 3286 }
ee92e7ba
MS
3287 else
3288 warning_at (loc, opt,
13c5654f 3289 "%K%qD writing between %E and %E bytes into "
d9c5a8b9 3290 "a region of size %E overflows the destination",
cc8bea0a
MS
3291 exp, func, range[0], range[1],
3292 dstsize);
ee92e7ba
MS
3293
3294 /* Return error when an overflow has been detected. */
3295 return false;
3296 }
3297 }
3298
3299 /* Check the maximum length of the source sequence against the size
3300 of the destination object if known, or against the maximum size
3301 of an object. */
cc8bea0a 3302 if (maxread)
ee92e7ba 3303 {
cc8bea0a
MS
3304 get_size_range (maxread, range);
3305
3306 /* Use the lower end for MAXREAD from now on. */
3307 if (range[0])
3308 maxread = range[0];
ee92e7ba 3309
cc8bea0a 3310 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
ee92e7ba
MS
3311 {
3312 location_t loc = tree_nonartificial_location (exp);
e50d56a5 3313 loc = expansion_point_location_if_in_system_header (loc);
ee92e7ba
MS
3314
3315 if (tree_int_cst_lt (maxobjsize, range[0]))
3316 {
e0676e2e
MS
3317 if (TREE_NO_WARNING (exp))
3318 return false;
3319
ee92e7ba
MS
3320 /* Warn about crazy big sizes first since that's more
3321 likely to be meaningful than saying that the bound
3322 is greater than the object size if both are big. */
3323 if (range[0] == range[1])
3324 warning_at (loc, opt,
13c5654f 3325 "%K%qD specified bound %E "
d9c5a8b9 3326 "exceeds maximum object size %E",
cc8bea0a 3327 exp, func,
d9c5a8b9 3328 range[0], maxobjsize);
ee92e7ba
MS
3329 else
3330 warning_at (loc, opt,
13c5654f 3331 "%K%qD specified bound between %E and %E "
d9c5a8b9 3332 "exceeds maximum object size %E",
cc8bea0a 3333 exp, func,
d9c5a8b9 3334 range[0], range[1], maxobjsize);
ee92e7ba
MS
3335
3336 return false;
3337 }
3338
cc8bea0a 3339 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
ee92e7ba 3340 {
e0676e2e
MS
3341 if (TREE_NO_WARNING (exp))
3342 return false;
3343
d9c5a8b9 3344 if (tree_int_cst_equal (range[0], range[1]))
ee92e7ba 3345 warning_at (loc, opt,
13c5654f 3346 "%K%qD specified bound %E "
d9c5a8b9 3347 "exceeds destination size %E",
cc8bea0a
MS
3348 exp, func,
3349 range[0], dstsize);
ee92e7ba
MS
3350 else
3351 warning_at (loc, opt,
13c5654f 3352 "%K%qD specified bound between %E and %E "
d9c5a8b9 3353 "exceeds destination size %E",
cc8bea0a
MS
3354 exp, func,
3355 range[0], range[1], dstsize);
ee92e7ba
MS
3356 return false;
3357 }
3358 }
3359 }
3360
cc8bea0a 3361 /* Check for reading past the end of SRC. */
d9c5a8b9 3362 if (slen
cc8bea0a
MS
3363 && slen == srcstr
3364 && dstwrite && range[0]
d9c5a8b9
MS
3365 && tree_int_cst_lt (slen, range[0]))
3366 {
e0676e2e
MS
3367 if (TREE_NO_WARNING (exp))
3368 return false;
3369
d9c5a8b9
MS
3370 location_t loc = tree_nonartificial_location (exp);
3371
3372 if (tree_int_cst_equal (range[0], range[1]))
457442eb
MS
3373 warning_n (loc, opt, tree_to_uhwi (range[0]),
3374 "%K%qD reading %E byte from a region of size %E",
3375 "%K%qD reading %E bytes from a region of size %E",
cc8bea0a 3376 exp, func, range[0], slen);
d9c5a8b9
MS
3377 else if (tree_int_cst_sign_bit (range[1]))
3378 {
3379 /* Avoid printing the upper bound if it's invalid. */
3380 warning_at (loc, opt,
13c5654f 3381 "%K%qD reading %E or more bytes from a region "
d9c5a8b9 3382 "of size %E",
cc8bea0a 3383 exp, func, range[0], slen);
d9c5a8b9
MS
3384 }
3385 else
3386 warning_at (loc, opt,
13c5654f 3387 "%K%qD reading between %E and %E bytes from a region "
d9c5a8b9 3388 "of size %E",
cc8bea0a 3389 exp, func, range[0], range[1], slen);
d9c5a8b9
MS
3390 return false;
3391 }
3392
ee92e7ba
MS
3393 return true;
3394}
3395
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  /* First try the object-size machinery proper; it succeeds for
     objects with constant sizes and constant offsets.  */
  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  /* Otherwise, if DEST is an SSA name, look at its defining statement
     to handle pointers formed by adding a (possibly variable) offset
     to a base whose size is known.  */
  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      /* The base pointer the offset (if any) is applied to.  */
      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == INTEGER_CST)
	    {
	      /* Constant offset: recursively size the base and subtract
		 the offset from it.  */
	      if (tree size = compute_objsize (dest, ostype))
		{
		  wide_int wioff = wi::to_wide (off);
		  wide_int wisiz = wi::to_wide (size);

		  /* Ignore negative offsets for now.  For others,
		     use the lower bound as the most optimistic
		     estimate of the (remaining) size.  */
		  if (wi::sign_mask (wioff))
		    ;
		  else if (wi::ltu_p (wioff, wisiz))
		    return wide_int_to_tree (TREE_TYPE (size),
					     wi::sub (wisiz, wioff));
		  else
		    /* Offset at or past the end: nothing remains.  */
		    return size_zero_node;
		}
	    }
	  else if (TREE_CODE (off) == SSA_NAME
		   && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      /* Variable offset: use its value range, if any, and size
		 the object from the smallest non-negative offset.  */
	      wide_int min, max;
	      enum value_range_type rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	/* Any other defining statement gives no usable information.  */
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3509
3510/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
3511 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3512 call expression, DEST is the destination argument, SRC is the source
3513 argument or null, and LEN is the number of bytes. Use Object Size type-0
3514 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
3515 (no overflow or invalid sizes), false otherwise. */
3516
3517static bool
cc8bea0a 3518check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 3519{
ee92e7ba 3520 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
3521 try to determine the size of the largest source and destination
3522 object using type-0 Object Size regardless of the object size
3523 type specified by the option. */
3524 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3525 tree dstsize = compute_objsize (dest, 0);
ee92e7ba 3526
cc8bea0a
MS
3527 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3528 srcsize, dstsize);
d9c5a8b9
MS
3529}
3530
3531/* Validate memchr arguments without performing any expansion.
3532 Return NULL_RTX. */
3533
3534static rtx
3535expand_builtin_memchr (tree exp, rtx)
3536{
3537 if (!validate_arglist (exp,
3538 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3539 return NULL_RTX;
3540
3541 tree arg1 = CALL_EXPR_ARG (exp, 0);
3542 tree len = CALL_EXPR_ARG (exp, 2);
3543
3544 /* Diagnose calls where the specified length exceeds the size
3545 of the object. */
3546 if (warn_stringop_overflow)
3547 {
3548 tree size = compute_objsize (arg1, 0);
cc8bea0a
MS
3549 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3550 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
d9c5a8b9
MS
3551 }
3552
3553 return NULL_RTX;
ee92e7ba
MS
3554}
3555
5039610b
SL
3556/* Expand a call EXP to the memcpy builtin.
3557 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 3558 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 3559 mode MODE if that's convenient). */
5039610b 3560
28f4ec01 3561static rtx
44e10129 3562expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 3563{
5039610b
SL
3564 if (!validate_arglist (exp,
3565 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3566 return NULL_RTX;
ee92e7ba
MS
3567
3568 tree dest = CALL_EXPR_ARG (exp, 0);
3569 tree src = CALL_EXPR_ARG (exp, 1);
3570 tree len = CALL_EXPR_ARG (exp, 2);
3571
cc8bea0a 3572 check_memop_access (exp, dest, src, len);
ee92e7ba 3573
671a00ee
ML
3574 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3575 /*endp=*/ 0);
edcf72f3 3576}
57814e5e 3577
e50d56a5
MS
3578/* Check a call EXP to the memmove built-in for validity.
3579 Return NULL_RTX on both success and failure. */
3580
3581static rtx
3582expand_builtin_memmove (tree exp, rtx)
3583{
3584 if (!validate_arglist (exp,
3585 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 return NULL_RTX;
3587
3588 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 3589 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
3590 tree len = CALL_EXPR_ARG (exp, 2);
3591
cc8bea0a 3592 check_memop_access (exp, dest, src, len);
e50d56a5
MS
3593
3594 return NULL_RTX;
3595}
3596
5039610b
SL
3597/* Expand a call EXP to the mempcpy builtin.
3598 Return NULL_RTX if we failed; the caller should emit a normal call,
e3e9f108 3599 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e
JJ
3600 mode MODE if that's convenient). If ENDP is 0 return the
3601 destination pointer, if ENDP is 1 return the end pointer ala
3602 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3603 stpcpy. */
e3e9f108
JJ
3604
3605static rtx
671a00ee 3606expand_builtin_mempcpy (tree exp, rtx target)
e3e9f108 3607{
5039610b
SL
3608 if (!validate_arglist (exp,
3609 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3610 return NULL_RTX;
ee92e7ba
MS
3611
3612 tree dest = CALL_EXPR_ARG (exp, 0);
3613 tree src = CALL_EXPR_ARG (exp, 1);
3614 tree len = CALL_EXPR_ARG (exp, 2);
3615
af3fa359
MS
3616 /* Policy does not generally allow using compute_objsize (which
3617 is used internally by check_memop_size) to change code generation
3618 or drive optimization decisions.
3619
3620 In this instance it is safe because the code we generate has
3621 the same semantics regardless of the return value of
3622 check_memop_sizes. Exactly the same amount of data is copied
3623 and the return value is exactly the same in both cases.
3624
3625 Furthermore, check_memop_size always uses mode 0 for the call to
3626 compute_objsize, so the imprecise nature of compute_objsize is
3627 avoided. */
3628
ee92e7ba
MS
3629 /* Avoid expanding mempcpy into memcpy when the call is determined
3630 to overflow the buffer. This also prevents the same overflow
3631 from being diagnosed again when expanding memcpy. */
cc8bea0a 3632 if (!check_memop_access (exp, dest, src, len))
ee92e7ba
MS
3633 return NULL_RTX;
3634
3635 return expand_builtin_mempcpy_args (dest, src, len,
671a00ee 3636 target, exp, /*endp=*/ 1);
edcf72f3
IE
3637}
3638
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, int endp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may provide a better alignment/size estimate
     for the block operation than the static one.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, endp);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  if (endp == 1 && target != const0_rtx)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);
  /* NOTE(review): a pc_rtx result appears to signal that the move must
     be emitted as a library call by the caller — confirm against
     emit_block_move_hints.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* For mempcpy/stpcpy-style results, adjust the returned address to
     point at (or just before) the end of the copied block.  */
  if (endp && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (endp == 2)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3733
/* Helper for the mempcpy and stpcpy expanders.  DEST, SRC and LEN are
   the arguments of the original call ORIG_EXP; TARGET and ENDP are as
   for expand_builtin_memory_copy_args, to which this simply forwards.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, int endp)
{
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  endp);
}
3741
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Punt unless the target provides a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The result is the destination pointer, so capture it in a
	 register before the insn (which may modify its operands).  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  /* Operand 0 receives the end pointer only when the caller wants it.  */
  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3788
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
		destsize);

  return NULL_RTX;
}
3815
b8698a0f
L
3816/* Expand expression EXP, which is a call to the strcpy builtin. Return
3817 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 3818 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 3819 convenient). */
fed3cef0 3820
28f4ec01 3821static rtx
44e10129 3822expand_builtin_strcpy (tree exp, rtx target)
28f4ec01 3823{
ee92e7ba
MS
3824 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3825 return NULL_RTX;
3826
3827 tree dest = CALL_EXPR_ARG (exp, 0);
3828 tree src = CALL_EXPR_ARG (exp, 1);
3829
3830 if (warn_stringop_overflow)
3831 {
d9c5a8b9 3832 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
cc8bea0a
MS
3833 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3834 src, destsize);
ee92e7ba
MS
3835 }
3836
36537a1c
MS
3837 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3838 {
3839 /* Check to see if the argument was declared attribute nonstring
3840 and if so, issue a warning since at this point it's not known
3841 to be nul-terminated. */
3842 tree fndecl = get_callee_fndecl (exp);
3843 maybe_warn_nonstring_arg (fndecl, exp);
3844 return ret;
3845 }
3846
3847 return NULL_RTX;
5039610b
SL
3848}
3849
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* Emit a movstr insn if the target has one; otherwise return NULL_RTX
     so the caller falls back to a library call.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
3861
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies whose source is longer than the destination.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
		    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Copy strlen (SRC) + 1 bytes, returning a pointer one before
	 the end of the copy (ENDP 2), i.e. at the terminating nul.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp, /*endp=*/2);

      if (ret)
	return ret;

      /* The mempcpy expansion failed.  If the length is a compile-time
	 constant, try a plain strcpy expansion and compute the result
	 as DST + LEN by hand.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* stpcpy returns DST + strlen (SRC).  */
		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the movstr insn, if the target provides one.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3944
3ce4cdb2
MS
3945/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3946 arguments while being careful to avoid duplicate warnings (which could
3947 be issued if the expander were to expand the call, resulting in it
3948 being emitted in expand_call(). */
3949
3950static rtx
3951expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3952{
3953 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3954 {
3955 /* The call has been successfully expanded. Check for nonstring
3956 arguments and issue warnings as appropriate. */
3957 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3958 return ret;
3959 }
3960
3961 return NULL_RTX;
3962}
3963
e50d56a5
MS
3964/* Check a call EXP to the stpncpy built-in for validity.
3965 Return NULL_RTX on both success and failure. */
3966
3967static rtx
3968expand_builtin_stpncpy (tree exp, rtx)
3969{
3970 if (!validate_arglist (exp,
3971 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3972 || !warn_stringop_overflow)
3973 return NULL_RTX;
3974
c6c02519 3975 /* The source and destination of the call. */
e50d56a5
MS
3976 tree dest = CALL_EXPR_ARG (exp, 0);
3977 tree src = CALL_EXPR_ARG (exp, 1);
3978
c6c02519 3979 /* The exact number of bytes to write (not the maximum). */
e50d56a5 3980 tree len = CALL_EXPR_ARG (exp, 2);
e50d56a5 3981
c6c02519 3982 /* The size of the destination object. */
d9c5a8b9 3983 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e50d56a5 3984
cc8bea0a 3985 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
e50d56a5
MS
3986
3987 return NULL_RTX;
3988}
3989
57814e5e
JJ
3990/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3991 bytes from constant string DATA + OFFSET and return it as target
3992 constant. */
3993
14a43348 3994rtx
4682ae04 3995builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3996 scalar_int_mode mode)
57814e5e
JJ
3997{
3998 const char *str = (const char *) data;
3999
4000 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4001 return const0_rtx;
4002
4003 return c_readstr (str + offset, mode);
4004}
4005
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  EXP is the call
   expression and OBJSIZE the destination size determined by
   __strncat_chk, or null.  Returns true on success (no overflow or
   invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The bound on the number of bytes to append.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return false;
    }

  /* When the source length is unknown, or the bound is smaller than
     the source, the bound is what limits the write.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
		       objsize);
}
4066
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  /* When the source length is unknown, or the bound is smaller than
     the source, the bound is what limits the write.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
4132
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
	{
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_access will also
	     check SLEN if LEN's value isn't known.  */
	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
			destsize);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* SLEN becomes strlen (SRC) + 1, the number of bytes the string
	 itself (with its nul) occupies.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the end of
	     the string constant, giving the required padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
4195
ab937357
JJ
4196/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4197 bytes from constant string DATA + OFFSET and return it as target
4198 constant. */
4199
34d85166 4200rtx
4682ae04 4201builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4202 scalar_int_mode mode)
ab937357
JJ
4203{
4204 const char *c = (const char *) data;
f883e0a7 4205 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
4206
4207 memset (p, *c, GET_MODE_SIZE (mode));
4208
4209 return c_readstr (p, mode);
4210}
4211
1a887f86
RS
4212/* Callback routine for store_by_pieces. Return the RTL of a register
4213 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4214 char value given in the RTL register data. For example, if mode is
4215 4 bytes wide, return the RTL for 0x01010101*data. */
4216
4217static rtx
4682ae04 4218builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 4219 scalar_int_mode mode)
1a887f86
RS
4220{
4221 rtx target, coeff;
4222 size_t size;
4223 char *p;
4224
4225 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
4226 if (size == 1)
4227 return (rtx) data;
1a887f86 4228
f883e0a7 4229 p = XALLOCAVEC (char, size);
1a887f86
RS
4230 memset (p, 1, size);
4231 coeff = c_readstr (p, mode);
4232
5ab2f7b7 4233 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
4234 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4235 return force_reg (mode, target);
4236}
4237
b8698a0f
L
4238/* Expand expression EXP, which is a call to the memset builtin. Return
4239 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 4240 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 4241 convenient). */
fed3cef0 4242
28f4ec01 4243static rtx
ef4bddc2 4244expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 4245{
5039610b
SL
4246 if (!validate_arglist (exp,
4247 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4248 return NULL_RTX;
ee92e7ba
MS
4249
4250 tree dest = CALL_EXPR_ARG (exp, 0);
4251 tree val = CALL_EXPR_ARG (exp, 1);
4252 tree len = CALL_EXPR_ARG (exp, 2);
4253
cc8bea0a 4254 check_memop_access (exp, dest, NULL_TREE, len);
ee92e7ba
MS
4255
4256 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5039610b 4257}
28f4ec01 4258
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used for its
   location, callee and tail-call flag when falling back to a libcall.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-based alignment/size hints for this string op.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: the fill value is not a compile-time constant.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 2: constant fill value.  Reduce it to a host char, or punt
     to the libcall if that is not possible.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Nonzero constant byte: store by pieces or use the setmem pattern.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: zero fill — use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: rebuild a call to the original callee
     (memset or bzero) with the stabilized arguments.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4399
b8698a0f 4400/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 4401 NULL_RTX if we failed the caller should emit a normal call. */
5197bd50 4402
e3a709be 4403static rtx
8148fe65 4404expand_builtin_bzero (tree exp)
e3a709be 4405{
5039610b 4406 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 4407 return NULL_RTX;
e3a709be 4408
ee92e7ba
MS
4409 tree dest = CALL_EXPR_ARG (exp, 0);
4410 tree size = CALL_EXPR_ARG (exp, 1);
4411
cc8bea0a 4412 check_memop_access (exp, dest, NULL_TREE, size);
8d51ecf8 4413
3477addf 4414 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
4415 memset(ptr x, int 0, size_t y). This is done this way
4416 so that if it isn't expanded inline, we fallback to
4417 calling bzero instead of memset. */
8d51ecf8 4418
ee92e7ba
MS
4419 location_t loc = EXPR_LOCATION (exp);
4420
5039610b 4421 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
4422 fold_convert_loc (loc,
4423 size_type_node, size),
5039610b 4424 const0_rtx, VOIDmode, exp);
e3a709be
KG
4425}
4426
a666df60
RS
4427/* Try to expand cmpstr operation ICODE with the given operands.
4428 Return the result rtx on success, otherwise return null. */
4429
4430static rtx
4431expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4432 HOST_WIDE_INT align)
4433{
4434 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4435
4436 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4437 target = NULL_RTX;
4438
4439 struct expand_operand ops[4];
4440 create_output_operand (&ops[0], target, insn_mode);
4441 create_fixed_operand (&ops[1], arg1_rtx);
4442 create_fixed_operand (&ops[2], arg2_rtx);
4443 create_integer_operand (&ops[3], align);
4444 if (maybe_expand_insn (icode, 4, ops))
4445 return ops[0].value;
4446 return NULL_RTX;
4447}
4448
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len, /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For an equality comparison the operand order does not matter, so
     if only ARG1 is a constant string, swap the operands so that the
     constant ends up on the ARG2 side where CONSTFN expects it.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4559
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn pattern there is nothing more
     to try inline.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Include the terminating nul in each known length.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
28f4ec01 4676
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* Include the terminating nul in each known length.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4789
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Record the target hook's insns into a detached sequence so they can
     be moved to the function entry below.  */
  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4827
8870e212 4828/* Expand a call to __builtin_next_arg. */
5197bd50 4829
28f4ec01 4830static rtx
8870e212 4831expand_builtin_next_arg (void)
28f4ec01 4832{
8870e212
JJ
4833 /* Checking arguments is already done in fold_builtin_next_arg
4834 that must be called before this function. */
4319e38c 4835 return expand_binop (ptr_mode, add_optab,
38173d38
JH
4836 crtl->args.internal_arg_pointer,
4837 crtl->args.arg_offset_rtx,
28f4ec01
BS
4838 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4839}
4840
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here; NEEDS_LVALUE is nonzero when the caller will write
   through the result.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      /* Non-array va_list: stabilize the address and re-dereference it,
	 so the value is only evaluated once.  */
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4890
c35d187f
RH
4891/* The "standard" definition of va_list is void*. */
4892
4893tree
4894std_build_builtin_va_list (void)
4895{
4896 return ptr_type_node;
4897}
4898
35cbb299
KT
4899/* The "standard" abi va_list is va_list_type_node. */
4900
4901tree
4902std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4903{
4904 return va_list_type_node;
4905}
4906
4907/* The "standard" type of va_list is va_list_type_node. */
4908
4909tree
4910std_canonical_va_list_type (tree type)
4911{
4912 tree wtype, htype;
4913
35cbb299
KT
4914 wtype = va_list_type_node;
4915 htype = type;
431e31a9
TV
4916
4917 if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
4918 {
4919 /* If va_list is an array type, the argument may have decayed
4920 to a pointer type, e.g. by being passed to another function.
4921 In that case, unwrap both types so that we can compare the
4922 underlying records. */
4923 if (TREE_CODE (htype) == ARRAY_TYPE
4924 || POINTER_TYPE_P (htype))
4925 {
4926 wtype = TREE_TYPE (wtype);
4927 htype = TREE_TYPE (htype);
4928 }
4929 }
4930 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4931 return va_list_type_node;
4932
4933 return NULL_TREE;
4934}
4935
d3707adb
RH
4936/* The "standard" implementation of va_start: just assign `nextarg' to
4937 the variable. */
5197bd50 4938
d3707adb 4939void
4682ae04 4940std_expand_builtin_va_start (tree valist, rtx nextarg)
d3707adb 4941{
508dabda
ILT
4942 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4943 convert_move (va_r, nextarg, 0);
d3707adb
RH
4944}
4945
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   errors are reported via diagnostics.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse of the second argument;
     a nonzero return means an error was reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target override the standard expansion if it provides one.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4974
5039610b 4975/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 4976
d3707adb 4977static rtx
5039610b 4978expand_builtin_va_end (tree exp)
d3707adb 4979{
5039610b 4980 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 4981
daf68dd7
RH
4982 /* Evaluate for side effects, if needed. I hate macros that don't
4983 do that. */
4984 if (TREE_SIDE_EFFECTS (valist))
4985 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
4986
4987 return const0_rtx;
4988}
4989
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Protect both operands from multiple evaluation; the destination
     is needed as an lvalue.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5041
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL identifies which; EXP is the call.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, ensure the value lives in a
	 register (or is constant) before handing it back.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5092
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  Handles the plain
   BUILT_IN_ALLOCA form as well as the _WITH_ALIGN and
   _WITH_ALIGN_AND_MAX variants, which carry explicit alignment and
   maximum-size arguments.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit
	  ))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
5157
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the stack-dynamic offset and fold it into the bottom
     address before calling into the libsanitizer runtime.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5181
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Reuse SUBTARGET for the operand only if its mode matches.  */
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
5211
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB selects the unary operation (e.g. ffs, clz, popcount).  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
994a57cd 5240
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
5f2d6cfa 5260
/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}
5280
5281
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  The remaining arguments must be side-effect free by
   this point; assert that.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
5298
/* Expand a trap: emit the target's trap insn if it has one, otherwise
   expand a call to abort.  Always followed by a barrier since control
   does not continue past this point.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
075ec276 5320
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5331
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Save the argument so it is only evaluated once even if the abs
     expansion references it more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5354
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
5377
/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
5427
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5452
/* Expand EXP, a call to the builtin trampoline initialization routine.
   ONSTACK is true for the classic on-stack trampoline (which may draw a
   -Wtrampolines warning); false for a heap-allocated one.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5510
/* Expand EXP, a call to the builtin trampoline adjustment routine.
   Rounds the trampoline address to TRAMPOLINE_ALIGNMENT and lets the
   target hook post-process it, if one is provided.  */

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
5526
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
5559
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
			 targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
5578
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard any insns it emitted.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build a declaration for the wrapper with the same type as FN, and
     expand the rewritten call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
b8698a0f 5750
48ae6c13
RH
5751
5752\f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
5768
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5793
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
5814
5815
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Each warning is issued at most once per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
5878
5879/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 5880 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
5881 true if this is the boolean form. TARGET is a place for us to store the
5882 results; this is NOT optional if IS_BOOL is true. */
5883
static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* Request only the result the caller uses: the boolean for the _bool
     form, the old value for the _val form.  TARGET == const0_rtx means
     the result is entirely unused.  */
  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
5911
5912/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5913 general form is actually an atomic exchange, and some targets only
5914 support a reduced form with the second argument being a constant 1.
b8698a0f 5915 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 5916 the results. */
48ae6c13
RH
5917
5918static rtx
ef4bddc2 5919expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 5920 rtx target)
48ae6c13 5921{
1387fef3 5922 rtx val, mem;
48ae6c13
RH
5923
5924 /* Expand the operands. */
5039610b 5925 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
5926 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5927
744accb2 5928 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
5929}
5930
5931/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5932
5933static void
ef4bddc2 5934expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
5935{
5936 rtx mem;
5937
5938 /* Expand the operands. */
5939 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5940
46b35980 5941 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
5942}
5943
5944/* Given an integer representing an ``enum memmodel'', verify its
5945 correctness and return the memory model enum. */
5946
static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Targets may encode extra (architecture-specific) bits in the model;
     let the target hook validate them, otherwise reject unknown bits.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
5987
5988/* Expand the __atomic_exchange intrinsic:
5989 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5990 EXP is the CALL_EXPR.
5991 TARGET is an optional place for us to store the results. */
5992
5993static rtx
ef4bddc2 5994expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
5995{
5996 rtx val, mem;
5997 enum memmodel model;
5998
5999 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
6000
6001 if (!flag_inline_atomics)
6002 return NULL_RTX;
6003
6004 /* Expand the operands. */
6005 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6006 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6007
744accb2 6008 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
6009}
6010
6011/* Expand the __atomic_compare_exchange intrinsic:
6012 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6013 TYPE desired, BOOL weak,
6014 enum memmodel success,
6015 enum memmodel failure)
6016 EXP is the CALL_EXPR.
6017 TARGET is an optional place for us to store the results. */
6018
static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* Diagnose invalid model combinations; fall back to SEQ_CST so
     expansion can continue.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  EXPECT is passed by address and accessed as
     a MEM in MODE.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6092
849a76a5
JJ
6093/* Helper function for expand_ifn_atomic_compare_exchange - expand
6094 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6095 call. The weak parameter must be dropped to match the expected parameter
6096 list and the expected argument changed from value to pointer to memory
6097 slot. */
6098
static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* The library entry point takes the expected value by address; spill
     it to a stack temporary and pass the slot's address.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The internal function's result is a complex pair; rebuild it from
	 the library call's boolean result and the stack slot contents.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6142
6143/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6144
void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in the low byte and the "weak"
     flag in bit 8.  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      /* Inline expansion disabled: lower to the library builtin.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* No inline sequence available for this mode: emit the library
	 call instead.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* Result is a complex pair: (success flag, old value).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6212
86951993
AM
6213/* Expand the __atomic_load intrinsic:
6214 TYPE __atomic_load (TYPE *object, enum memmodel)
6215 EXP is the CALL_EXPR.
6216 TARGET is an optional place for us to store the results. */
6217
6218static rtx
ef4bddc2 6219expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
86951993
AM
6220{
6221 rtx mem;
6222 enum memmodel model;
6223
6224 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
46b35980 6225 if (is_mm_release (model) || is_mm_acq_rel (model))
86951993 6226 {
8d9fdb49
MP
6227 source_location loc
6228 = expansion_point_location_if_in_system_header (input_location);
6229 warning_at (loc, OPT_Winvalid_memory_model,
6230 "invalid memory model for %<__atomic_load%>");
77df5327 6231 model = MEMMODEL_SEQ_CST;
86951993
AM
6232 }
6233
6234 if (!flag_inline_atomics)
6235 return NULL_RTX;
6236
6237 /* Expand the operand. */
6238 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6239
6240 return expand_atomic_load (target, mem, model);
6241}
6242
6243
6244/* Expand the __atomic_store intrinsic:
6245 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.  */
6248
static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* Only relaxed, release and seq_cst are valid models for a store.  */
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Final FALSE: this is a plain store, not a lock-release style one
     (cf. expand_builtin_atomic_clear which passes TRUE).  */
  return expand_atomic_store (mem, val, model, false);
}
6275
6276/* Expand the __atomic_fetch_XXX intrinsic:
6277 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6278 EXP is the CALL_EXPR.
6279 TARGET is an optional place for us to store the results.
6280 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6281 FETCH_AFTER is true if returning the result of the operation.
6282 FETCH_AFTER is false if returning the value before the operation.
6283 IGNORE is true if the result is not used.
6284 EXT_CALL is the correct builtin for an external call if this cannot be
6285 resolved to an instruction sequence. */
6286
static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect
     the callee of EXP to the EXT_CALL builtin, restore it afterwards.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: recover the after-value as ~(result & val).  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6350
adedd5c1
JJ
6351/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6352
void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional fourth argument supplies an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function onto its RMW code and direct optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* Result unused: emit a plain atomic RMW on the (possibly
	 complemented) single-bit mask.  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* Try the direct bit-test-and-* instruction pattern first.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* No direct pattern: fall back to an atomic fetch_op followed by
     masking out the tested bit.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* FLAG set: reduce the fetched value to the single tested bit.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6434
d660c35e
AM
6435/* Expand an atomic clear operation.
6436 void _atomic_clear (BOOL *obj, enum memmodel)
6437 EXP is the call expression. */
6438
6439static rtx
6440expand_builtin_atomic_clear (tree exp)
6441{
ef4bddc2 6442 machine_mode mode;
d660c35e
AM
6443 rtx mem, ret;
6444 enum memmodel model;
6445
f4b31647 6446 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
6447 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6448 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6449
46b35980 6450 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 6451 {
8d9fdb49
MP
6452 source_location loc
6453 = expansion_point_location_if_in_system_header (input_location);
6454 warning_at (loc, OPT_Winvalid_memory_model,
6455 "invalid memory model for %<__atomic_store%>");
77df5327 6456 model = MEMMODEL_SEQ_CST;
d660c35e
AM
6457 }
6458
6459 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6460 Failing that, a store is issued by __atomic_store. The only way this can
6461 fail is if the bool type is larger than a word size. Unlikely, but
6462 handle it anyway for completeness. Assume a single threaded model since
6463 there is no atomic support in this case, and no barriers are required. */
6464 ret = expand_atomic_store (mem, const0_rtx, model, true);
6465 if (!ret)
6466 emit_move_insn (mem, const0_rtx);
6467 return const0_rtx;
6468}
6469
6470/* Expand an atomic test_and_set operation.
6471 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6472 EXP is the call expression. */
6473
6474static rtx
744accb2 6475expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 6476{
744accb2 6477 rtx mem;
d660c35e 6478 enum memmodel model;
ef4bddc2 6479 machine_mode mode;
d660c35e 6480
f4b31647 6481 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
6482 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6483 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6484
744accb2 6485 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
6486}
6487
6488
86951993
AM
6489/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6490 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6491
static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* The size must be a compile-time constant to answer "always".  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6559
6560/* Return true if the parameters to call EXP represent an object which will
6561 always generate lock free instructions. The first argument represents the
6562 size of the object, and the second parameter is a pointer to the object
6563 itself. If NULL is passed for the object, then the result is based on
6564 typical alignment for an object of the specified size. Otherwise return
6565 false. */
6566
6567static rtx
6568expand_builtin_atomic_always_lock_free (tree exp)
6569{
6570 tree size;
6571 tree arg0 = CALL_EXPR_ARG (exp, 0);
6572 tree arg1 = CALL_EXPR_ARG (exp, 1);
6573
6574 if (TREE_CODE (arg0) != INTEGER_CST)
6575 {
6576 error ("non-constant argument 1 to __atomic_always_lock_free");
6577 return const0_rtx;
6578 }
6579
6580 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 6581 if (size == boolean_true_node)
86951993
AM
6582 return const1_rtx;
6583 return const0_rtx;
6584}
6585
6586/* Return a one or zero if it can be determined that object ARG1 of size ARG
6587 is lock free on this architecture. */
6588
6589static tree
6590fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6591{
6592 if (!flag_inline_atomics)
6593 return NULL_TREE;
6594
6595 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
6596 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6597 return boolean_true_node;
86951993
AM
6598
6599 return NULL_TREE;
6600}
6601
6602/* Return true if the parameters to call EXP represent an object which will
6603 always generate lock free instructions. The first argument represents the
6604 size of the object, and the second parameter is a pointer to the object
6605 itself. If NULL is passed for the object, then the result is based on
6606 typical alignment for an object of the specified size. Otherwise return
   NULL.  */
6608
6609static rtx
6610expand_builtin_atomic_is_lock_free (tree exp)
6611{
6612 tree size;
6613 tree arg0 = CALL_EXPR_ARG (exp, 0);
6614 tree arg1 = CALL_EXPR_ARG (exp, 1);
6615
6616 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6617 {
6618 error ("non-integer argument 1 to __atomic_is_lock_free");
6619 return NULL_RTX;
6620 }
6621
6622 if (!flag_inline_atomics)
6623 return NULL_RTX;
6624
6625 /* If the value is known at compile time, return the RTX for it. */
6626 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 6627 if (size == boolean_true_node)
86951993
AM
6628 return const1_rtx;
6629
6630 return NULL_RTX;
6631}
6632
86951993
AM
6633/* Expand the __atomic_thread_fence intrinsic:
6634 void __atomic_thread_fence (enum memmodel)
6635 EXP is the CALL_EXPR. */
6636
6637static void
6638expand_builtin_atomic_thread_fence (tree exp)
6639{
c39169c8
RH
6640 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6641 expand_mem_thread_fence (model);
86951993
AM
6642}
6643
6644/* Expand the __atomic_signal_fence intrinsic:
6645 void __atomic_signal_fence (enum memmodel)
6646 EXP is the CALL_EXPR. */
6647
6648static void
6649expand_builtin_atomic_signal_fence (tree exp)
6650{
c39169c8
RH
6651 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6652 expand_mem_signal_fence (model);
48ae6c13
RH
6653}
6654
6655/* Expand the __sync_synchronize intrinsic. */
6656
static void
expand_builtin_sync_synchronize (void)
{
  /* A full barrier with legacy __sync SEQ_CST semantics.  */
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6662
f959607b
CLT
static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
6685
static void
expand_builtin_set_thread_pointer (tree exp)
{
  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return;
  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      /* The target provides a pattern; feed it the new pointer value.  */
      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
			     Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);
      return;
    }
  error ("__builtin_set_thread_pointer is not supported on this target");
}
6704
28f4ec01 6705\f
862d0b35
DN
6706/* Emit code to restore the current value of stack. */
6707
static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  /* Fix up any REG_ARGS_SIZE notes on the insns emitted between PREV
     and the restore.  */
  fixup_args_size_notes (prev, get_last_insn (), 0);
}
6723
862d0b35
DN
6724/* Emit code to save the current value of stack. */
6725
6726static rtx
6727expand_stack_save (void)
6728{
6729 rtx ret = NULL_RTX;
6730
862d0b35
DN
6731 emit_stack_save (SAVE_BLOCK, &ret);
6732 return ret;
6733}
6734
1f62d637
TV
6735/* Emit code to get the openacc gang, worker or vector id or size. */
6736
static rtx
expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
{
  const char *name;
  rtx fallback_retval;
  rtx_insn *(*gen_fn) (rtx, rtx);
  /* Select the diagnostic name, the value to use when the target has no
     oacc_dim_size support, and the generator for the id/size insn.  */
  switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
    {
    case BUILT_IN_GOACC_PARLEVEL_ID:
      name = "__builtin_goacc_parlevel_id";
      fallback_retval = const0_rtx;
      gen_fn = targetm.gen_oacc_dim_pos;
      break;
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      name = "__builtin_goacc_parlevel_size";
      fallback_retval = const1_rtx;
      gen_fn = targetm.gen_oacc_dim_size;
      break;
    default:
      gcc_unreachable ();
    }

  if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
    {
      error ("%qs only supported in OpenACC code", name);
      return const0_rtx;
    }

  tree arg = CALL_EXPR_ARG (exp, 0);
  if (TREE_CODE (arg) != INTEGER_CST)
    {
      error ("non-constant argument 0 to %qs", name);
      return const0_rtx;
    }

  /* Argument 0 must name one of the gang/worker/vector dimensions.  */
  int dim = TREE_INT_CST_LOW (arg);
  switch (dim)
    {
    case GOMP_DIM_GANG:
    case GOMP_DIM_WORKER:
    case GOMP_DIM_VECTOR:
      break;
    default:
      error ("illegal argument 0 to %qs", name);
      return const0_rtx;
    }

  if (ignore)
    return target;

  if (target == NULL_RTX)
    target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  if (!targetm.have_oacc_dim_size ())
    {
      emit_move_insn (target, fallback_retval);
      return target;
    }

  /* The generator needs a register destination; copy through one if
     TARGET is memory.  */
  rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
  emit_insn (gen_fn (reg, GEN_INT (dim)));
  if (reg != target)
    emit_move_insn (target, reg);

  return target;
}
41dbbb37 6803
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   to: (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  /* Memory rtx covering the LENGTH bytes of the variable string.  */
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  /* Accumulate the per-char difference into RESULT (TARGET if given).  */
  rtx result = target ? target : gen_reg_rtx (mode);
  /* Common jump destination once a difference has been found.  */
  rtx_code_label *ne_label = gen_label_rtx ();
  /* Characters are compared as unsigned char, per strcmp/memcmp.  */
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load the i-th byte of the variable string and materialize the
	 i-th byte of the constant string.  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* CONST_STR_N tells which call argument was the constant, so the
	 subtraction keeps the original operand order.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both chars to MODE (the int result type) before
	 subtracting, matching (int) (unsigned char) semantics.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* After every comparison but the last, branch out early on a
	 nonzero difference; the last difference simply falls through.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  /* Emit the whole generated sequence at once.  */
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
6873
/* Inline expansion of a call to str(n)cmp or memcmp, with result going
   to TARGET if that's convenient.
   If the call is not inlined, return NULL_RTX.  */
static rtx
inline_expand_builtin_string_cmp (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  unsigned HOST_WIDE_INT length = 0;
  bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);

  /* Do NOT apply this inlining expansion when optimizing for size or
     optimization level below 2.  */
  if (optimize < 2 || optimize_insn_for_size_p ())
    return NULL_RTX;

  gcc_checking_assert (fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRNCMP
		       || fcode == BUILT_IN_MEMCMP);

  /* On a target where the type of the call (int) has same or narrower
     precision than unsigned char, give up the inlining expansion.  */
  if (TYPE_PRECISION (unsigned_char_type_node)
      >= TYPE_PRECISION (TREE_TYPE (exp)))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  /* For strncmp/memcmp only: the length bound argument.  */
  tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;

  unsigned HOST_WIDE_INT len1 = 0;
  unsigned HOST_WIDE_INT len2 = 0;
  unsigned HOST_WIDE_INT len3 = 0;

  /* NOTE(review): len1/len2 appear to be set by c_getstr only when the
     corresponding argument is a constant string — confirm whether they
     include the terminating NUL, since LENGTH below is derived from
     them.  */
  const char *src_str1 = c_getstr (arg1, &len1);
  const char *src_str2 = c_getstr (arg2, &len2);

  /* If neither string is a constant string, the call does not qualify.  */
  if (!src_str1 && !src_str2)
    return NULL_RTX;

  /* For strncmp, if the length is not a constant, it does not qualify.  */
  if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
    return NULL_RTX;

  /* Pick the shorter constant string as the one to unroll against
     (CONST_STR_N records whether it was argument 1 or 2).  */
  int const_str_n = 0;
  if (!len1)
    const_str_n = 2;
  else if (!len2)
    const_str_n = 1;
  else if (len2 > len1)
    const_str_n = 1;
  else
    const_str_n = 2;

  gcc_checking_assert (const_str_n > 0);
  length = (const_str_n == 1) ? len1 : len2;

  /* A smaller explicit bound further limits the compare length.  */
  if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
    length = len3;

  /* If the length of the comparison is larger than the threshold,
     do nothing.  */
  if (length > (unsigned HOST_WIDE_INT)
	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
    return NULL_RTX;

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  /* Now, start inline expansion the call.  */
  return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
			    (const_str_n == 1) ? src_str1 : src_str2, length,
			    const_str_n, mode);
}
6948
425fc685
RE
6949/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6950 represents the size of the first argument to that call, or VOIDmode
6951 if the argument is a pointer. IGNORE will be true if the result
6952 isn't used. */
6953static rtx
6954expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6955 bool ignore)
6956{
6957 rtx val, failsafe;
6958 unsigned nargs = call_expr_nargs (exp);
6959
6960 tree arg0 = CALL_EXPR_ARG (exp, 0);
6961
6962 if (mode == VOIDmode)
6963 {
6964 mode = TYPE_MODE (TREE_TYPE (arg0));
6965 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6966 }
6967
6968 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6969
6970 /* An optional second argument can be used as a failsafe value on
6971 some machines. If it isn't present, then the failsafe value is
6972 assumed to be 0. */
6973 if (nargs > 1)
6974 {
6975 tree arg1 = CALL_EXPR_ARG (exp, 1);
6976 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6977 }
6978 else
6979 failsafe = const0_rtx;
6980
6981 /* If the result isn't used, the behavior is undefined. It would be
6982 nice to emit a warning here, but path splitting means this might
6983 happen with legitimate code. So simply drop the builtin
6984 expansion in that case; we've handled any side-effects above. */
6985 if (ignore)
6986 return const0_rtx;
6987
6988 /* If we don't have a suitable target, create one to hold the result. */
6989 if (target == NULL || GET_MODE (target) != mode)
6990 target = gen_reg_rtx (mode);
6991
6992 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6993 val = convert_modes (mode, VOIDmode, val, false);
6994
6995 return targetm.speculation_safe_value (mode, target, val, failsafe);
6996}
6997
28f4ec01
BS
6998/* Expand an expression EXP that calls a built-in function,
6999 with result going to TARGET if that's convenient
7000 (and in mode MODE if that's convenient).
7001 SUBTARGET may be used as the target for computing one of EXP's operands.
7002 IGNORE is nonzero if the value is to be ignored. */
7003
7004rtx
ef4bddc2 7005expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 7006 int ignore)
28f4ec01 7007{
2f503025 7008 tree fndecl = get_callee_fndecl (exp);
28f4ec01 7009 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
ef4bddc2 7010 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 7011 int flags;
28f4ec01 7012
d51151b2
JJ
7013 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7014 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7015
bdea98ca
MO
7016 /* When ASan is enabled, we don't want to expand some memory/string
7017 builtins and rely on libsanitizer's hooks. This allows us to avoid
7018 redundant checks and be sure, that possible overflow will be detected
7019 by ASan. */
7020
7021 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7022 return expand_call (exp, target, ignore);
7023
28f4ec01
BS
7024 /* When not optimizing, generate calls to library functions for a certain
7025 set of builtins. */
d25225de 7026 if (!optimize
48ae6c13 7027 && !called_as_built_in (fndecl)
63bf9a90
JH
7028 && fcode != BUILT_IN_FORK
7029 && fcode != BUILT_IN_EXECL
7030 && fcode != BUILT_IN_EXECV
7031 && fcode != BUILT_IN_EXECLP
7032 && fcode != BUILT_IN_EXECLE
7033 && fcode != BUILT_IN_EXECVP
7034 && fcode != BUILT_IN_EXECVE
9e878cf1 7035 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 7036 && fcode != BUILT_IN_FREE)
d25225de 7037 return expand_call (exp, target, ignore);
28f4ec01 7038
0a45ec5c
RS
7039 /* The built-in function expanders test for target == const0_rtx
7040 to determine whether the function's result will be ignored. */
7041 if (ignore)
7042 target = const0_rtx;
7043
7044 /* If the result of a pure or const built-in function is ignored, and
7045 none of its arguments are volatile, we can avoid expanding the
7046 built-in call and just evaluate the arguments for side-effects. */
7047 if (target == const0_rtx
9e3920e9
JJ
7048 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7049 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
7050 {
7051 bool volatilep = false;
7052 tree arg;
5039610b 7053 call_expr_arg_iterator iter;
0a45ec5c 7054
5039610b
SL
7055 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7056 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
7057 {
7058 volatilep = true;
7059 break;
7060 }
7061
7062 if (! volatilep)
7063 {
5039610b
SL
7064 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7065 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
7066 return const0_rtx;
7067 }
7068 }
7069
28f4ec01
BS
7070 switch (fcode)
7071 {
ea6a6627 7072 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 7073 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
7074 case BUILT_IN_FABSD32:
7075 case BUILT_IN_FABSD64:
7076 case BUILT_IN_FABSD128:
5039610b 7077 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 7078 if (target)
c22cacf3 7079 return target;
075ec276
RS
7080 break;
7081
ea6a6627 7082 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 7083 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 7084 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
7085 if (target)
7086 return target;
7087 break;
7088
5906d013
EC
7089 /* Just do a normal library call if we were unable to fold
7090 the values. */
ea6a6627 7091 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 7092 break;
28f4ec01 7093
1b1562a5 7094 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 7095 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
7096 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7097 if (target)
7098 return target;
7099 break;
7100
eaee4464
UB
7101 CASE_FLT_FN (BUILT_IN_ILOGB):
7102 if (! flag_unsafe_math_optimizations)
7103 break;
903c723b
TC
7104 gcc_fallthrough ();
7105 CASE_FLT_FN (BUILT_IN_ISINF):
7106 CASE_FLT_FN (BUILT_IN_FINITE):
7107 case BUILT_IN_ISFINITE:
7108 case BUILT_IN_ISNORMAL:
4359dc2a 7109 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
7110 if (target)
7111 return target;
7112 break;
7113
6c32ee74 7114 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7115 CASE_FLT_FN (BUILT_IN_LCEIL):
7116 CASE_FLT_FN (BUILT_IN_LLCEIL):
7117 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 7118 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 7119 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 7120 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
7121 if (target)
7122 return target;
7123 break;
7124
6c32ee74 7125 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
7126 CASE_FLT_FN (BUILT_IN_LRINT):
7127 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 7128 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
7129 CASE_FLT_FN (BUILT_IN_LROUND):
7130 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 7131 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
7132 if (target)
7133 return target;
7134 break;
7135
ea6a6627 7136 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 7137 target = expand_builtin_powi (exp, target);
17684d46
RG
7138 if (target)
7139 return target;
7140 break;
7141
75c7c595 7142 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 7143 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
7144 gcc_assert (target);
7145 return target;
7146
ea6a6627
VR
7147 CASE_FLT_FN (BUILT_IN_SIN):
7148 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
7149 if (! flag_unsafe_math_optimizations)
7150 break;
7151 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7152 if (target)
7153 return target;
7154 break;
7155
403e54f0
RG
7156 CASE_FLT_FN (BUILT_IN_SINCOS):
7157 if (! flag_unsafe_math_optimizations)
7158 break;
7159 target = expand_builtin_sincos (exp);
7160 if (target)
7161 return target;
7162 break;
7163
28f4ec01
BS
7164 case BUILT_IN_APPLY_ARGS:
7165 return expand_builtin_apply_args ();
7166
7167 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7168 FUNCTION with a copy of the parameters described by
7169 ARGUMENTS, and ARGSIZE. It returns a block of memory
7170 allocated on the stack into which is stored all the registers
7171 that might possibly be used for returning the result of a
7172 function. ARGUMENTS is the value returned by
7173 __builtin_apply_args. ARGSIZE is the number of bytes of
7174 arguments that must be copied. ??? How should this value be
7175 computed? We'll also need a safe worst case value for varargs
7176 functions. */
7177 case BUILT_IN_APPLY:
5039610b 7178 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 7179 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 7180 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 7181 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
7182 return const0_rtx;
7183 else
7184 {
28f4ec01
BS
7185 rtx ops[3];
7186
5039610b
SL
7187 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7188 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7189 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
7190
7191 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7192 }
7193
7194 /* __builtin_return (RESULT) causes the function to return the
7195 value described by RESULT. RESULT is address of the block of
7196 memory returned by __builtin_apply. */
7197 case BUILT_IN_RETURN:
5039610b
SL
7198 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7199 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
7200 return const0_rtx;
7201
7202 case BUILT_IN_SAVEREGS:
d3707adb 7203 return expand_builtin_saveregs ();
28f4ec01 7204
6ef5231b
JJ
7205 case BUILT_IN_VA_ARG_PACK:
7206 /* All valid uses of __builtin_va_arg_pack () are removed during
7207 inlining. */
c94ed7a1 7208 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
7209 return const0_rtx;
7210
ab0e176c
JJ
7211 case BUILT_IN_VA_ARG_PACK_LEN:
7212 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7213 inlining. */
c94ed7a1 7214 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
7215 return const0_rtx;
7216
28f4ec01
BS
7217 /* Return the address of the first anonymous stack arg. */
7218 case BUILT_IN_NEXT_ARG:
5039610b 7219 if (fold_builtin_next_arg (exp, false))
c22cacf3 7220 return const0_rtx;
8870e212 7221 return expand_builtin_next_arg ();
28f4ec01 7222
677feb77
DD
7223 case BUILT_IN_CLEAR_CACHE:
7224 target = expand_builtin___clear_cache (exp);
7225 if (target)
7226 return target;
7227 break;
7228
28f4ec01 7229 case BUILT_IN_CLASSIFY_TYPE:
5039610b 7230 return expand_builtin_classify_type (exp);
28f4ec01
BS
7231
7232 case BUILT_IN_CONSTANT_P:
6de9cd9a 7233 return const0_rtx;
28f4ec01
BS
7234
7235 case BUILT_IN_FRAME_ADDRESS:
7236 case BUILT_IN_RETURN_ADDRESS:
5039610b 7237 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
7238
7239 /* Returns the address of the area where the structure is returned.
7240 0 otherwise. */
7241 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 7242 if (call_expr_nargs (exp) != 0
ca7fd9cd 7243 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 7244 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 7245 return const0_rtx;
28f4ec01 7246 else
ca7fd9cd 7247 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 7248
9e878cf1 7249 CASE_BUILT_IN_ALLOCA:
b7e52782 7250 target = expand_builtin_alloca (exp);
28f4ec01
BS
7251 if (target)
7252 return target;
7253 break;
7254
e3174bdf
MO
7255 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7256 return expand_asan_emit_allocas_unpoison (exp);
7257
6de9cd9a
DN
7258 case BUILT_IN_STACK_SAVE:
7259 return expand_stack_save ();
7260
7261 case BUILT_IN_STACK_RESTORE:
5039610b 7262 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
7263 return const0_rtx;
7264
ac868f29 7265 case BUILT_IN_BSWAP16:
167fa32c
EC
7266 case BUILT_IN_BSWAP32:
7267 case BUILT_IN_BSWAP64:
ac868f29 7268 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
7269 if (target)
7270 return target;
7271 break;
7272
ea6a6627 7273 CASE_INT_FN (BUILT_IN_FFS):
5039610b 7274 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7275 subtarget, ffs_optab);
2928cd7a
RH
7276 if (target)
7277 return target;
7278 break;
7279
ea6a6627 7280 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 7281 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7282 subtarget, clz_optab);
2928cd7a
RH
7283 if (target)
7284 return target;
7285 break;
7286
ea6a6627 7287 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 7288 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7289 subtarget, ctz_optab);
2928cd7a
RH
7290 if (target)
7291 return target;
7292 break;
7293
3801c801 7294 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
7295 target = expand_builtin_unop (target_mode, exp, target,
7296 subtarget, clrsb_optab);
7297 if (target)
7298 return target;
7299 break;
7300
ea6a6627 7301 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 7302 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7303 subtarget, popcount_optab);
2928cd7a
RH
7304 if (target)
7305 return target;
7306 break;
7307
ea6a6627 7308 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 7309 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 7310 subtarget, parity_optab);
28f4ec01
BS
7311 if (target)
7312 return target;
7313 break;
7314
7315 case BUILT_IN_STRLEN:
5039610b 7316 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
7317 if (target)
7318 return target;
7319 break;
7320
781ff3d8
MS
7321 case BUILT_IN_STRNLEN:
7322 target = expand_builtin_strnlen (exp, target, target_mode);
7323 if (target)
7324 return target;
7325 break;
7326
ee92e7ba
MS
7327 case BUILT_IN_STRCAT:
7328 target = expand_builtin_strcat (exp, target);
7329 if (target)
7330 return target;
7331 break;
7332
28f4ec01 7333 case BUILT_IN_STRCPY:
44e10129 7334 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
7335 if (target)
7336 return target;
7337 break;
8d51ecf8 7338
ee92e7ba
MS
7339 case BUILT_IN_STRNCAT:
7340 target = expand_builtin_strncat (exp, target);
7341 if (target)
7342 return target;
7343 break;
7344
da9e9f08 7345 case BUILT_IN_STRNCPY:
44e10129 7346 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
7347 if (target)
7348 return target;
7349 break;
8d51ecf8 7350
9cb65f92 7351 case BUILT_IN_STPCPY:
609ae0e2 7352 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
7353 if (target)
7354 return target;
7355 break;
7356
e50d56a5
MS
7357 case BUILT_IN_STPNCPY:
7358 target = expand_builtin_stpncpy (exp, target);
7359 if (target)
7360 return target;
7361 break;
7362
d9c5a8b9
MS
7363 case BUILT_IN_MEMCHR:
7364 target = expand_builtin_memchr (exp, target);
7365 if (target)
7366 return target;
7367 break;
7368
28f4ec01 7369 case BUILT_IN_MEMCPY:
44e10129 7370 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
7371 if (target)
7372 return target;
7373 break;
7374
e50d56a5
MS
7375 case BUILT_IN_MEMMOVE:
7376 target = expand_builtin_memmove (exp, target);
7377 if (target)
7378 return target;
7379 break;
7380
9cb65f92 7381 case BUILT_IN_MEMPCPY:
671a00ee 7382 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
7383 if (target)
7384 return target;
7385 break;
7386
7387 case BUILT_IN_MEMSET:
5039610b 7388 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
7389 if (target)
7390 return target;
7391 break;
7392
e3a709be 7393 case BUILT_IN_BZERO:
8148fe65 7394 target = expand_builtin_bzero (exp);
e3a709be
KG
7395 if (target)
7396 return target;
7397 break;
7398
10a0e2a9 7399 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
7400 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
7401 when changing it to a strcmp call. */
7402 case BUILT_IN_STRCMP_EQ:
7403 target = expand_builtin_memcmp (exp, target, true);
7404 if (target)
7405 return target;
7406
7407 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 7408 TREE_OPERAND (exp, 1)
8b0b334a
QZ
7409 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7410
7411 /* Delete the last parameter. */
7412 unsigned int i;
7413 vec<tree, va_gc> *arg_vec;
7414 vec_alloc (arg_vec, 2);
7415 for (i = 0; i < 2; i++)
7416 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7417 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7418 /* FALLTHROUGH */
7419
28f4ec01 7420 case BUILT_IN_STRCMP:
44e10129 7421 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
7422 if (target)
7423 return target;
7424 break;
7425
8b0b334a
QZ
7426 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7427 back to a BUILT_IN_STRNCMP. */
7428 case BUILT_IN_STRNCMP_EQ:
7429 target = expand_builtin_memcmp (exp, target, true);
7430 if (target)
7431 return target;
7432
7433 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 7434 TREE_OPERAND (exp, 1)
8b0b334a
QZ
7435 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7436 /* FALLTHROUGH */
7437
da9e9f08
KG
7438 case BUILT_IN_STRNCMP:
7439 target = expand_builtin_strncmp (exp, target, mode);
7440 if (target)
7441 return target;
7442 break;
7443
4b2a62db 7444 case BUILT_IN_BCMP:
28f4ec01 7445 case BUILT_IN_MEMCMP:
36b85e43
BS
7446 case BUILT_IN_MEMCMP_EQ:
7447 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
7448 if (target)
7449 return target;
36b85e43
BS
7450 if (fcode == BUILT_IN_MEMCMP_EQ)
7451 {
7452 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7453 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7454 }
28f4ec01 7455 break;
28f4ec01
BS
7456
7457 case BUILT_IN_SETJMP:
903c723b 7458 /* This should have been lowered to the builtins below. */
4f6c2131
EB
7459 gcc_unreachable ();
7460
7461 case BUILT_IN_SETJMP_SETUP:
7462 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7463 and the receiver label. */
5039610b 7464 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 7465 {
5039610b 7466 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 7467 VOIDmode, EXPAND_NORMAL);
5039610b 7468 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 7469 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7470
7471 /* This is copied from the handling of non-local gotos. */
7472 expand_builtin_setjmp_setup (buf_addr, label_r);
7473 nonlocal_goto_handler_labels
b5241a5a 7474 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
7475 nonlocal_goto_handler_labels);
7476 /* ??? Do not let expand_label treat us as such since we would
7477 not want to be both on the list of non-local labels and on
7478 the list of forced labels. */
7479 FORCED_LABEL (label) = 0;
7480 return const0_rtx;
7481 }
7482 break;
7483
4f6c2131
EB
7484 case BUILT_IN_SETJMP_RECEIVER:
7485 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 7486 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 7487 {
5039610b 7488 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 7489 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
7490
7491 expand_builtin_setjmp_receiver (label_r);
7492 return const0_rtx;
7493 }
250d07b6 7494 break;
28f4ec01
BS
7495
7496 /* __builtin_longjmp is passed a pointer to an array of five words.
7497 It's similar to the C library longjmp function but works with
7498 __builtin_setjmp above. */
7499 case BUILT_IN_LONGJMP:
5039610b 7500 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 7501 {
5039610b 7502 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 7503 VOIDmode, EXPAND_NORMAL);
5039610b 7504 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
7505
7506 if (value != const1_rtx)
7507 {
9e637a26 7508 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
7509 return const0_rtx;
7510 }
7511
7512 expand_builtin_longjmp (buf_addr, value);
7513 return const0_rtx;
7514 }
4f6c2131 7515 break;
28f4ec01 7516
6de9cd9a 7517 case BUILT_IN_NONLOCAL_GOTO:
5039610b 7518 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
7519 if (target)
7520 return target;
7521 break;
7522
2b92e7f5
RK
7523 /* This updates the setjmp buffer that is its argument with the value
7524 of the current stack pointer. */
7525 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 7526 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
7527 {
7528 rtx buf_addr
5039610b 7529 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
7530
7531 expand_builtin_update_setjmp_buf (buf_addr);
7532 return const0_rtx;
7533 }
7534 break;
7535
28f4ec01 7536 case BUILT_IN_TRAP:
9602f5a0 7537 expand_builtin_trap ();
28f4ec01
BS
7538 return const0_rtx;
7539
468059bc
DD
7540 case BUILT_IN_UNREACHABLE:
7541 expand_builtin_unreachable ();
7542 return const0_rtx;
7543
ea6a6627 7544 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
7545 case BUILT_IN_SIGNBITD32:
7546 case BUILT_IN_SIGNBITD64:
7547 case BUILT_IN_SIGNBITD128:
ef79730c
RS
7548 target = expand_builtin_signbit (exp, target);
7549 if (target)
7550 return target;
7551 break;
7552
28f4ec01
BS
7553 /* Various hooks for the DWARF 2 __throw routine. */
7554 case BUILT_IN_UNWIND_INIT:
7555 expand_builtin_unwind_init ();
7556 return const0_rtx;
7557 case BUILT_IN_DWARF_CFA:
7558 return virtual_cfa_rtx;
7559#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
7560 case BUILT_IN_DWARF_SP_COLUMN:
7561 return expand_builtin_dwarf_sp_column ();
d9d5c9de 7562 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 7563 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 7564 return const0_rtx;
28f4ec01
BS
7565#endif
7566 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 7567 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7568 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 7569 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 7570 case BUILT_IN_EH_RETURN:
5039610b
SL
7571 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7572 CALL_EXPR_ARG (exp, 1));
28f4ec01 7573 return const0_rtx;
52a11cbf 7574 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 7575 return expand_builtin_eh_return_data_regno (exp);
c76362b4 7576 case BUILT_IN_EXTEND_POINTER:
5039610b 7577 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
7578 case BUILT_IN_EH_POINTER:
7579 return expand_builtin_eh_pointer (exp);
7580 case BUILT_IN_EH_FILTER:
7581 return expand_builtin_eh_filter (exp);
7582 case BUILT_IN_EH_COPY_VALUES:
7583 return expand_builtin_eh_copy_values (exp);
c76362b4 7584
6c535c69 7585 case BUILT_IN_VA_START:
5039610b 7586 return expand_builtin_va_start (exp);
d3707adb 7587 case BUILT_IN_VA_END:
5039610b 7588 return expand_builtin_va_end (exp);
d3707adb 7589 case BUILT_IN_VA_COPY:
5039610b 7590 return expand_builtin_va_copy (exp);
994a57cd 7591 case BUILT_IN_EXPECT:
5039610b 7592 return expand_builtin_expect (exp, target);
1e9168b2
ML
7593 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7594 return expand_builtin_expect_with_probability (exp, target);
45d439ac
JJ
7595 case BUILT_IN_ASSUME_ALIGNED:
7596 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 7597 case BUILT_IN_PREFETCH:
5039610b 7598 expand_builtin_prefetch (exp);
a9ccbb60
JJ
7599 return const0_rtx;
7600
6de9cd9a 7601 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
7602 return expand_builtin_init_trampoline (exp, true);
7603 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7604 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 7605 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 7606 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 7607
4c640e26
EB
7608 case BUILT_IN_INIT_DESCRIPTOR:
7609 return expand_builtin_init_descriptor (exp);
7610 case BUILT_IN_ADJUST_DESCRIPTOR:
7611 return expand_builtin_adjust_descriptor (exp);
7612
d1c38823
ZD
7613 case BUILT_IN_FORK:
7614 case BUILT_IN_EXECL:
7615 case BUILT_IN_EXECV:
7616 case BUILT_IN_EXECLP:
7617 case BUILT_IN_EXECLE:
7618 case BUILT_IN_EXECVP:
7619 case BUILT_IN_EXECVE:
5039610b 7620 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
7621 if (target)
7622 return target;
7623 break;
28f4ec01 7624
e0a8ecf2
AM
7625 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7626 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7627 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7628 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7629 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7630 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 7631 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
7632 if (target)
7633 return target;
7634 break;
7635
e0a8ecf2
AM
7636 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7637 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7638 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7639 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7640 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7641 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 7642 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
7643 if (target)
7644 return target;
7645 break;
7646
e0a8ecf2
AM
7647 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7648 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7649 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7650 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7651 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7652 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 7653 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
7654 if (target)
7655 return target;
7656 break;
7657
e0a8ecf2
AM
7658 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7659 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7660 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7661 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7662 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7663 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 7664 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
7665 if (target)
7666 return target;
7667 break;
7668
e0a8ecf2
AM
7669 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7670 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7671 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7672 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7673 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7674 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 7675 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
7676 if (target)
7677 return target;
7678 break;
7679
e0a8ecf2
AM
7680 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7681 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7682 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7683 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7684 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7685 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 7686 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
7687 if (target)
7688 return target;
7689 break;
7690
e0a8ecf2
AM
7691 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7692 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7693 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7694 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7695 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7696 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 7697 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
7698 if (target)
7699 return target;
7700 break;
7701
e0a8ecf2
AM
7702 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7703 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7704 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7705 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7706 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7707 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 7708 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
7709 if (target)
7710 return target;
7711 break;
7712
e0a8ecf2
AM
7713 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7714 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7715 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7716 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7717 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7718 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 7719 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
7720 if (target)
7721 return target;
7722 break;
7723
e0a8ecf2
AM
7724 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7725 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7726 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7727 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7728 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7729 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 7730 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
7731 if (target)
7732 return target;
7733 break;
7734
e0a8ecf2
AM
7735 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7736 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7737 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7738 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7739 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7740 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 7741 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
7742 if (target)
7743 return target;
7744 break;
7745
e0a8ecf2
AM
7746 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7747 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7748 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7749 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7750 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7751 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 7752 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
7753 if (target)
7754 return target;
7755 break;
7756
e0a8ecf2
AM
7757 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7758 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7759 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7760 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7761 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
7762 if (mode == VOIDmode)
7763 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
7764 if (!target || !register_operand (target, mode))
7765 target = gen_reg_rtx (mode);
02ee605c 7766
e0a8ecf2
AM
7767 mode = get_builtin_sync_mode
7768 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 7769 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
7770 if (target)
7771 return target;
7772 break;
7773
e0a8ecf2
AM
7774 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7775 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7776 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7777 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7778 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7779 mode = get_builtin_sync_mode
7780 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 7781 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
7782 if (target)
7783 return target;
7784 break;
7785
e0a8ecf2
AM
7786 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7787 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7788 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7789 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7790 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7791 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7792 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
7793 if (target)
7794 return target;
7795 break;
7796
e0a8ecf2
AM
7797 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7798 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7799 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7800 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7801 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7802 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7803 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
7804 return const0_rtx;
7805
e0a8ecf2
AM
7806 case BUILT_IN_SYNC_SYNCHRONIZE:
7807 expand_builtin_sync_synchronize ();
48ae6c13
RH
7808 return const0_rtx;
7809
86951993
AM
7810 case BUILT_IN_ATOMIC_EXCHANGE_1:
7811 case BUILT_IN_ATOMIC_EXCHANGE_2:
7812 case BUILT_IN_ATOMIC_EXCHANGE_4:
7813 case BUILT_IN_ATOMIC_EXCHANGE_8:
7814 case BUILT_IN_ATOMIC_EXCHANGE_16:
7815 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7816 target = expand_builtin_atomic_exchange (mode, exp, target);
7817 if (target)
7818 return target;
7819 break;
7820
7821 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7822 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7823 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7824 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7825 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
7826 {
7827 unsigned int nargs, z;
9771b263 7828 vec<tree, va_gc> *vec;
e351ae85
AM
7829
7830 mode =
7831 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7832 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7833 if (target)
7834 return target;
7835
7836 /* If this is turned into an external library call, the weak parameter
7837 must be dropped to match the expected parameter list. */
7838 nargs = call_expr_nargs (exp);
9771b263 7839 vec_alloc (vec, nargs - 1);
e351ae85 7840 for (z = 0; z < 3; z++)
9771b263 7841 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7842 /* Skip the boolean weak parameter. */
7843 for (z = 4; z < 6; z++)
9771b263 7844 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
7845 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7846 break;
7847 }
86951993
AM
7848
7849 case BUILT_IN_ATOMIC_LOAD_1:
7850 case BUILT_IN_ATOMIC_LOAD_2:
7851 case BUILT_IN_ATOMIC_LOAD_4:
7852 case BUILT_IN_ATOMIC_LOAD_8:
7853 case BUILT_IN_ATOMIC_LOAD_16:
7854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7855 target = expand_builtin_atomic_load (mode, exp, target);
7856 if (target)
7857 return target;
7858 break;
7859
7860 case BUILT_IN_ATOMIC_STORE_1:
7861 case BUILT_IN_ATOMIC_STORE_2:
7862 case BUILT_IN_ATOMIC_STORE_4:
7863 case BUILT_IN_ATOMIC_STORE_8:
7864 case BUILT_IN_ATOMIC_STORE_16:
7865 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7866 target = expand_builtin_atomic_store (mode, exp);
7867 if (target)
7868 return const0_rtx;
7869 break;
7870
7871 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7872 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7873 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7874 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7875 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7876 {
7877 enum built_in_function lib;
7878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7879 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7880 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7881 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7882 ignore, lib);
7883 if (target)
7884 return target;
7885 break;
7886 }
7887 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7888 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7889 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7890 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7891 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7892 {
7893 enum built_in_function lib;
7894 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7895 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7896 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7897 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7898 ignore, lib);
7899 if (target)
7900 return target;
7901 break;
7902 }
7903 case BUILT_IN_ATOMIC_AND_FETCH_1:
7904 case BUILT_IN_ATOMIC_AND_FETCH_2:
7905 case BUILT_IN_ATOMIC_AND_FETCH_4:
7906 case BUILT_IN_ATOMIC_AND_FETCH_8:
7907 case BUILT_IN_ATOMIC_AND_FETCH_16:
7908 {
7909 enum built_in_function lib;
7910 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7911 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7912 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7913 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7914 ignore, lib);
7915 if (target)
7916 return target;
7917 break;
7918 }
7919 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7920 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7921 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7922 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7923 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7924 {
7925 enum built_in_function lib;
7926 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7927 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7928 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7929 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7930 ignore, lib);
7931 if (target)
7932 return target;
7933 break;
7934 }
7935 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7936 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7937 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7938 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7939 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7940 {
7941 enum built_in_function lib;
7942 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7943 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7944 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7945 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7946 ignore, lib);
7947 if (target)
7948 return target;
7949 break;
7950 }
7951 case BUILT_IN_ATOMIC_OR_FETCH_1:
7952 case BUILT_IN_ATOMIC_OR_FETCH_2:
7953 case BUILT_IN_ATOMIC_OR_FETCH_4:
7954 case BUILT_IN_ATOMIC_OR_FETCH_8:
7955 case BUILT_IN_ATOMIC_OR_FETCH_16:
7956 {
7957 enum built_in_function lib;
7958 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7959 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7960 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7961 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7962 ignore, lib);
7963 if (target)
7964 return target;
7965 break;
7966 }
7967 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7968 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7969 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7970 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7971 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7972 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7973 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7974 ignore, BUILT_IN_NONE);
7975 if (target)
7976 return target;
7977 break;
7978
7979 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7980 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7981 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7982 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7983 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7984 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7985 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7986 ignore, BUILT_IN_NONE);
7987 if (target)
7988 return target;
7989 break;
7990
7991 case BUILT_IN_ATOMIC_FETCH_AND_1:
7992 case BUILT_IN_ATOMIC_FETCH_AND_2:
7993 case BUILT_IN_ATOMIC_FETCH_AND_4:
7994 case BUILT_IN_ATOMIC_FETCH_AND_8:
7995 case BUILT_IN_ATOMIC_FETCH_AND_16:
7996 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7997 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7998 ignore, BUILT_IN_NONE);
7999 if (target)
8000 return target;
8001 break;
8002
8003 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8004 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8005 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8006 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8007 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8008 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8009 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8010 ignore, BUILT_IN_NONE);
8011 if (target)
8012 return target;
8013 break;
8014
8015 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8016 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8017 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8018 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8019 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8020 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8021 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8022 ignore, BUILT_IN_NONE);
8023 if (target)
8024 return target;
8025 break;
8026
8027 case BUILT_IN_ATOMIC_FETCH_OR_1:
8028 case BUILT_IN_ATOMIC_FETCH_OR_2:
8029 case BUILT_IN_ATOMIC_FETCH_OR_4:
8030 case BUILT_IN_ATOMIC_FETCH_OR_8:
8031 case BUILT_IN_ATOMIC_FETCH_OR_16:
8032 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8033 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8034 ignore, BUILT_IN_NONE);
8035 if (target)
8036 return target;
8037 break;
d660c35e
AM
8038
8039 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 8040 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
8041
8042 case BUILT_IN_ATOMIC_CLEAR:
8043 return expand_builtin_atomic_clear (exp);
86951993
AM
8044
8045 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8046 return expand_builtin_atomic_always_lock_free (exp);
8047
8048 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8049 target = expand_builtin_atomic_is_lock_free (exp);
8050 if (target)
8051 return target;
8052 break;
8053
8054 case BUILT_IN_ATOMIC_THREAD_FENCE:
8055 expand_builtin_atomic_thread_fence (exp);
8056 return const0_rtx;
8057
8058 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8059 expand_builtin_atomic_signal_fence (exp);
8060 return const0_rtx;
8061
10a0d495
JJ
8062 case BUILT_IN_OBJECT_SIZE:
8063 return expand_builtin_object_size (exp);
8064
8065 case BUILT_IN_MEMCPY_CHK:
8066 case BUILT_IN_MEMPCPY_CHK:
8067 case BUILT_IN_MEMMOVE_CHK:
8068 case BUILT_IN_MEMSET_CHK:
8069 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8070 if (target)
8071 return target;
8072 break;
8073
8074 case BUILT_IN_STRCPY_CHK:
8075 case BUILT_IN_STPCPY_CHK:
8076 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 8077 case BUILT_IN_STPNCPY_CHK:
10a0d495 8078 case BUILT_IN_STRCAT_CHK:
1c2fc017 8079 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
8080 case BUILT_IN_SNPRINTF_CHK:
8081 case BUILT_IN_VSNPRINTF_CHK:
8082 maybe_emit_chk_warning (exp, fcode);
8083 break;
8084
8085 case BUILT_IN_SPRINTF_CHK:
8086 case BUILT_IN_VSPRINTF_CHK:
8087 maybe_emit_sprintf_chk_warning (exp, fcode);
8088 break;
8089
f9555f40 8090 case BUILT_IN_FREE:
a3a704a4
MH
8091 if (warn_free_nonheap_object)
8092 maybe_emit_free_warning (exp);
f9555f40
JJ
8093 break;
8094
f959607b
CLT
8095 case BUILT_IN_THREAD_POINTER:
8096 return expand_builtin_thread_pointer (exp, target);
8097
8098 case BUILT_IN_SET_THREAD_POINTER:
8099 expand_builtin_set_thread_pointer (exp);
8100 return const0_rtx;
8101
41dbbb37 8102 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
8103 /* Do library call, if we failed to expand the builtin when
8104 folding. */
41dbbb37
TS
8105 break;
8106
1f62d637
TV
8107 case BUILT_IN_GOACC_PARLEVEL_ID:
8108 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8109 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8110
425fc685
RE
8111 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8112 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8113
8114 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8115 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8116 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8117 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8118 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8119 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8120 return expand_speculation_safe_value (mode, exp, target, ignore);
8121
e62f4abc 8122 default: /* just do library call, if unknown builtin */
84b8b0e0 8123 break;
28f4ec01
BS
8124 }
8125
8126 /* The switch statement above can drop through to cause the function
8127 to be called normally. */
8128 return expand_call (exp, target, ignore);
8129}
b0b3afb2 8130
4977bab6 8131/* Determine whether a tree node represents a call to a built-in
feda1845
RS
8132 function. If the tree T is a call to a built-in function with
8133 the right number of arguments of the appropriate types, return
8134 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8135 Otherwise the return value is END_BUILTINS. */
4682ae04 8136
4977bab6 8137enum built_in_function
fa233e34 8138builtin_mathfn_code (const_tree t)
4977bab6 8139{
fa233e34
KG
8140 const_tree fndecl, arg, parmlist;
8141 const_tree argtype, parmtype;
8142 const_call_expr_arg_iterator iter;
4977bab6 8143
5f92d109 8144 if (TREE_CODE (t) != CALL_EXPR)
4977bab6
ZW
8145 return END_BUILTINS;
8146
2f503025 8147 fndecl = get_callee_fndecl (t);
3d78e008
ML
8148 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8149 return END_BUILTINS;
4977bab6 8150
feda1845 8151 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
fa233e34 8152 init_const_call_expr_arg_iterator (t, &iter);
feda1845 8153 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
c0a47a61 8154 {
feda1845
RS
8155 /* If a function doesn't take a variable number of arguments,
8156 the last element in the list will have type `void'. */
8157 parmtype = TREE_VALUE (parmlist);
8158 if (VOID_TYPE_P (parmtype))
8159 {
fa233e34 8160 if (more_const_call_expr_args_p (&iter))
feda1845
RS
8161 return END_BUILTINS;
8162 return DECL_FUNCTION_CODE (fndecl);
8163 }
8164
fa233e34 8165 if (! more_const_call_expr_args_p (&iter))
c0a47a61 8166 return END_BUILTINS;
b8698a0f 8167
fa233e34 8168 arg = next_const_call_expr_arg (&iter);
5039610b 8169 argtype = TREE_TYPE (arg);
feda1845
RS
8170
8171 if (SCALAR_FLOAT_TYPE_P (parmtype))
8172 {
8173 if (! SCALAR_FLOAT_TYPE_P (argtype))
8174 return END_BUILTINS;
8175 }
8176 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8177 {
8178 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8179 return END_BUILTINS;
8180 }
8181 else if (POINTER_TYPE_P (parmtype))
8182 {
8183 if (! POINTER_TYPE_P (argtype))
8184 return END_BUILTINS;
8185 }
8186 else if (INTEGRAL_TYPE_P (parmtype))
8187 {
8188 if (! INTEGRAL_TYPE_P (argtype))
8189 return END_BUILTINS;
8190 }
8191 else
c0a47a61 8192 return END_BUILTINS;
c0a47a61
RS
8193 }
8194
feda1845 8195 /* Variable-length argument list. */
4977bab6
ZW
8196 return DECL_FUNCTION_CODE (fndecl);
8197}
8198
5039610b
SL
8199/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8200 evaluate to a constant. */
b0b3afb2
BS
8201
8202static tree
5039610b 8203fold_builtin_constant_p (tree arg)
b0b3afb2 8204{
b0b3afb2
BS
8205 /* We return 1 for a numeric type that's known to be a constant
8206 value at compile-time or for an aggregate type that's a
8207 literal constant. */
5039610b 8208 STRIP_NOPS (arg);
b0b3afb2
BS
8209
8210 /* If we know this is a constant, emit the constant of one. */
5039610b
SL
8211 if (CONSTANT_CLASS_P (arg)
8212 || (TREE_CODE (arg) == CONSTRUCTOR
8213 && TREE_CONSTANT (arg)))
b0b3afb2 8214 return integer_one_node;
5039610b 8215 if (TREE_CODE (arg) == ADDR_EXPR)
fb664a2c 8216 {
5039610b 8217 tree op = TREE_OPERAND (arg, 0);
fb664a2c
RG
8218 if (TREE_CODE (op) == STRING_CST
8219 || (TREE_CODE (op) == ARRAY_REF
8220 && integer_zerop (TREE_OPERAND (op, 1))
8221 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8222 return integer_one_node;
8223 }
b0b3afb2 8224
0dcd3840
RH
8225 /* If this expression has side effects, show we don't know it to be a
8226 constant. Likewise if it's a pointer or aggregate type since in
8227 those case we only want literals, since those are only optimized
13104975
ZW
8228 when generating RTL, not later.
8229 And finally, if we are compiling an initializer, not code, we
8230 need to return a definite result now; there's not going to be any
8231 more optimization done. */
5039610b
SL
8232 if (TREE_SIDE_EFFECTS (arg)
8233 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8234 || POINTER_TYPE_P (TREE_TYPE (arg))
63b48197 8235 || cfun == 0
4e7d7b3d
JJ
8236 || folding_initializer
8237 || force_folding_builtin_constant_p)
b0b3afb2
BS
8238 return integer_zero_node;
8239
5039610b 8240 return NULL_TREE;
b0b3afb2
BS
8241}
8242
1e9168b2
ML
8243/* Create builtin_expect or builtin_expect_with_probability
8244 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8245 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8246 builtin_expect_with_probability instead uses third argument as PROBABILITY
8247 value. */
6de9cd9a
DN
8248
8249static tree
ed9c79e1 8250build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
1e9168b2 8251 tree predictor, tree probability)
6de9cd9a 8252{
419ce103 8253 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6de9cd9a 8254
1e9168b2
ML
8255 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8256 : BUILT_IN_EXPECT_WITH_PROBABILITY);
419ce103
AN
8257 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8258 ret_type = TREE_TYPE (TREE_TYPE (fn));
8259 pred_type = TREE_VALUE (arg_types);
8260 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8261
db3927fb
AH
8262 pred = fold_convert_loc (loc, pred_type, pred);
8263 expected = fold_convert_loc (loc, expected_type, expected);
1e9168b2
ML
8264
8265 if (probability)
8266 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8267 else
8268 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8269 predictor);
419ce103
AN
8270
8271 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8272 build_int_cst (ret_type, 0));
8273}
8274
1e9168b2 8275/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
419ce103
AN
8276 NULL_TREE if no simplification is possible. */
8277
ed9c79e1 8278tree
1e9168b2
ML
8279fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8280 tree arg3)
419ce103 8281{
be31603a 8282 tree inner, fndecl, inner_arg0;
419ce103
AN
8283 enum tree_code code;
8284
be31603a
KT
8285 /* Distribute the expected value over short-circuiting operators.
8286 See through the cast from truthvalue_type_node to long. */
8287 inner_arg0 = arg0;
625a9766 8288 while (CONVERT_EXPR_P (inner_arg0)
be31603a
KT
8289 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8290 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8291 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8292
419ce103
AN
8293 /* If this is a builtin_expect within a builtin_expect keep the
8294 inner one. See through a comparison against a constant. It
8295 might have been added to create a thruthvalue. */
be31603a
KT
8296 inner = inner_arg0;
8297
419ce103
AN
8298 if (COMPARISON_CLASS_P (inner)
8299 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8300 inner = TREE_OPERAND (inner, 0);
8301
8302 if (TREE_CODE (inner) == CALL_EXPR
8303 && (fndecl = get_callee_fndecl (inner))
3d78e008
ML
8304 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8305 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
419ce103
AN
8306 return arg0;
8307
be31603a 8308 inner = inner_arg0;
419ce103
AN
8309 code = TREE_CODE (inner);
8310 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8311 {
8312 tree op0 = TREE_OPERAND (inner, 0);
8313 tree op1 = TREE_OPERAND (inner, 1);
0d2f7959 8314 arg1 = save_expr (arg1);
419ce103 8315
1e9168b2
ML
8316 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8317 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
419ce103
AN
8318 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8319
db3927fb 8320 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
419ce103
AN
8321 }
8322
8323 /* If the argument isn't invariant then there's nothing else we can do. */
be31603a 8324 if (!TREE_CONSTANT (inner_arg0))
5039610b 8325 return NULL_TREE;
6de9cd9a 8326
419ce103
AN
8327 /* If we expect that a comparison against the argument will fold to
8328 a constant return the constant. In practice, this means a true
8329 constant or the address of a non-weak symbol. */
be31603a 8330 inner = inner_arg0;
6de9cd9a
DN
8331 STRIP_NOPS (inner);
8332 if (TREE_CODE (inner) == ADDR_EXPR)
8333 {
8334 do
8335 {
8336 inner = TREE_OPERAND (inner, 0);
8337 }
8338 while (TREE_CODE (inner) == COMPONENT_REF
8339 || TREE_CODE (inner) == ARRAY_REF);
8813a647 8340 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
5039610b 8341 return NULL_TREE;
6de9cd9a
DN
8342 }
8343
419ce103
AN
8344 /* Otherwise, ARG0 already has the proper type for the return value. */
8345 return arg0;
6de9cd9a
DN
8346}
8347
5039610b 8348/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 8349
ad82abb8 8350static tree
5039610b 8351fold_builtin_classify_type (tree arg)
ad82abb8 8352{
5039610b 8353 if (arg == 0)
45a2c477 8354 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 8355
45a2c477 8356 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
8357}
8358
5039610b 8359/* Fold a call to __builtin_strlen with argument ARG. */
667bbbbb
EC
8360
8361static tree
ab996409 8362fold_builtin_strlen (location_t loc, tree type, tree arg)
667bbbbb 8363{
5039610b 8364 if (!validate_arg (arg, POINTER_TYPE))
667bbbbb
EC
8365 return NULL_TREE;
8366 else
8367 {
5039610b 8368 tree len = c_strlen (arg, 0);
667bbbbb
EC
8369
8370 if (len)
ab996409 8371 return fold_convert_loc (loc, type, len);
667bbbbb
EC
8372
8373 return NULL_TREE;
8374 }
8375}
8376
ab5e2615
RH
8377/* Fold a call to __builtin_inf or __builtin_huge_val. */
8378
8379static tree
db3927fb 8380fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 8381{
efdc7e19
RH
8382 REAL_VALUE_TYPE real;
8383
6d84156b
JM
8384 /* __builtin_inff is intended to be usable to define INFINITY on all
8385 targets. If an infinity is not available, INFINITY expands "to a
8386 positive constant of type float that overflows at translation
8387 time", footnote "In this case, using INFINITY will violate the
8388 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8389 Thus we pedwarn to ensure this constraint violation is
8390 diagnosed. */
ab5e2615 8391 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 8392 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 8393
efdc7e19
RH
8394 real_inf (&real);
8395 return build_real (type, real);
ab5e2615
RH
8396}
8397
75c7c595
RG
8398/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8399 NULL_TREE if no simplification can be made. */
8400
8401static tree
db3927fb
AH
8402fold_builtin_sincos (location_t loc,
8403 tree arg0, tree arg1, tree arg2)
75c7c595 8404{
5039610b 8405 tree type;
5c1a2e63 8406 tree fndecl, call = NULL_TREE;
75c7c595 8407
5039610b
SL
8408 if (!validate_arg (arg0, REAL_TYPE)
8409 || !validate_arg (arg1, POINTER_TYPE)
8410 || !validate_arg (arg2, POINTER_TYPE))
75c7c595
RG
8411 return NULL_TREE;
8412
75c7c595 8413 type = TREE_TYPE (arg0);
75c7c595
RG
8414
8415 /* Calculate the result when the argument is a constant. */
b03ff92e 8416 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
5c1a2e63 8417 if (fn == END_BUILTINS)
75c7c595
RG
8418 return NULL_TREE;
8419
5c1a2e63
RS
8420 /* Canonicalize sincos to cexpi. */
8421 if (TREE_CODE (arg0) == REAL_CST)
8422 {
8423 tree complex_type = build_complex_type (type);
d7ebef06 8424 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
5c1a2e63
RS
8425 }
8426 if (!call)
8427 {
8428 if (!targetm.libc_has_function (function_c99_math_complex)
8429 || !builtin_decl_implicit_p (fn))
8430 return NULL_TREE;
8431 fndecl = builtin_decl_explicit (fn);
8432 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8433 call = builtin_save_expr (call);
8434 }
75c7c595 8435
1b17b994
RB
8436 tree ptype = build_pointer_type (type);
8437 arg1 = fold_convert (ptype, arg1);
8438 arg2 = fold_convert (ptype, arg2);
928c19bb 8439 return build2 (COMPOUND_EXPR, void_type_node,
75c7c595 8440 build2 (MODIFY_EXPR, void_type_node,
db3927fb 8441 build_fold_indirect_ref_loc (loc, arg1),
5c1a2e63 8442 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
75c7c595 8443 build2 (MODIFY_EXPR, void_type_node,
db3927fb 8444 build_fold_indirect_ref_loc (loc, arg2),
5c1a2e63 8445 fold_build1_loc (loc, REALPART_EXPR, type, call)));
75c7c595
RG
8446}
8447
5039610b
SL
8448/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8449 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
8450
8451static tree
db3927fb 8452fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 8453{
5039610b
SL
8454 if (!validate_arg (arg1, POINTER_TYPE)
8455 || !validate_arg (arg2, POINTER_TYPE)
8456 || !validate_arg (len, INTEGER_TYPE))
8457 return NULL_TREE;
5bb650ec
RS
8458
8459 /* If the LEN parameter is zero, return zero. */
8460 if (integer_zerop (len))
db3927fb 8461 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 8462 arg1, arg2);
5bb650ec
RS
8463
8464 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8465 if (operand_equal_p (arg1, arg2, 0))
db3927fb 8466 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
01847e9d 8467
01847e9d
RS
8468 /* If len parameter is one, return an expression corresponding to
8469 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
ae7e9ddd 8470 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
8471 {
8472 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8473 tree cst_uchar_ptr_node
8474 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8475
db3927fb
AH
8476 tree ind1
8477 = fold_convert_loc (loc, integer_type_node,
8478 build1 (INDIRECT_REF, cst_uchar_node,
8479 fold_convert_loc (loc,
8480 cst_uchar_ptr_node,
01847e9d 8481 arg1)));
db3927fb
AH
8482 tree ind2
8483 = fold_convert_loc (loc, integer_type_node,
8484 build1 (INDIRECT_REF, cst_uchar_node,
8485 fold_convert_loc (loc,
8486 cst_uchar_ptr_node,
01847e9d 8487 arg2)));
db3927fb 8488 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
01847e9d 8489 }
5bb650ec 8490
5039610b 8491 return NULL_TREE;
5bb650ec
RS
8492}
8493
5039610b 8494/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
8495
8496static tree
db3927fb 8497fold_builtin_isascii (location_t loc, tree arg)
df0785d6 8498{
5039610b
SL
8499 if (!validate_arg (arg, INTEGER_TYPE))
8500 return NULL_TREE;
df0785d6
KG
8501 else
8502 {
8503 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 8504 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8505 build_int_cst (integer_type_node,
6728ee79 8506 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 8507 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 8508 arg, integer_zero_node);
df0785d6
KG
8509 }
8510}
8511
5039610b 8512/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
8513
8514static tree
db3927fb 8515fold_builtin_toascii (location_t loc, tree arg)
df0785d6 8516{
5039610b
SL
8517 if (!validate_arg (arg, INTEGER_TYPE))
8518 return NULL_TREE;
b8698a0f 8519
5039610b 8520 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 8521 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8522 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
8523}
8524
5039610b 8525/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
8526
8527static tree
db3927fb 8528fold_builtin_isdigit (location_t loc, tree arg)
61218d19 8529{
5039610b
SL
8530 if (!validate_arg (arg, INTEGER_TYPE))
8531 return NULL_TREE;
61218d19
KG
8532 else
8533 {
8534 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
8535 /* According to the C standard, isdigit is unaffected by locale.
8536 However, it definitely is affected by the target character set. */
c5ff069d
ZW
8537 unsigned HOST_WIDE_INT target_digit0
8538 = lang_hooks.to_target_charset ('0');
8539
8540 if (target_digit0 == 0)
8541 return NULL_TREE;
8542
db3927fb 8543 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
8544 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8545 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 8546 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 8547 build_int_cst (unsigned_type_node, 9));
61218d19
KG
8548 }
8549}
ef79730c 8550
5039610b 8551/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
8552
8553static tree
db3927fb 8554fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 8555{
5039610b
SL
8556 if (!validate_arg (arg, REAL_TYPE))
8557 return NULL_TREE;
9655d83b 8558
db3927fb 8559 arg = fold_convert_loc (loc, type, arg);
db3927fb 8560 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8561}
8562
5039610b 8563/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
8564
8565static tree
db3927fb 8566fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 8567{
5039610b
SL
8568 if (!validate_arg (arg, INTEGER_TYPE))
8569 return NULL_TREE;
9655d83b 8570
db3927fb 8571 arg = fold_convert_loc (loc, type, arg);
db3927fb 8572 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8573}
8574
527cab20
KG
8575/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8576
8577static tree
db3927fb 8578fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 8579{
c128599a
KG
8580 if (validate_arg (arg, COMPLEX_TYPE)
8581 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
8582 {
8583 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 8584
527cab20
KG
8585 if (atan2_fn)
8586 {
5039610b 8587 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
8588 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8589 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8590 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
8591 }
8592 }
b8698a0f 8593
527cab20
KG
8594 return NULL_TREE;
8595}
8596
7a2a25ab
KG
8597/* Fold a call to builtin frexp, we can assume the base is 2. */
8598
8599static tree
db3927fb 8600fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7a2a25ab
KG
8601{
8602 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8603 return NULL_TREE;
b8698a0f 8604
7a2a25ab 8605 STRIP_NOPS (arg0);
b8698a0f 8606
7a2a25ab
KG
8607 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8608 return NULL_TREE;
b8698a0f 8609
db3927fb 8610 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7a2a25ab
KG
8611
8612 /* Proceed if a valid pointer type was passed in. */
8613 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8614 {
8615 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8616 tree frac, exp;
b8698a0f 8617
7a2a25ab
KG
8618 switch (value->cl)
8619 {
8620 case rvc_zero:
8621 /* For +-0, return (*exp = 0, +-0). */
8622 exp = integer_zero_node;
8623 frac = arg0;
8624 break;
8625 case rvc_nan:
8626 case rvc_inf:
8627 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
db3927fb 8628 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7a2a25ab
KG
8629 case rvc_normal:
8630 {
8631 /* Since the frexp function always expects base 2, and in
8632 GCC normalized significands are already in the range
8633 [0.5, 1.0), we have exactly what frexp wants. */
8634 REAL_VALUE_TYPE frac_rvt = *value;
8635 SET_REAL_EXP (&frac_rvt, 0);
8636 frac = build_real (rettype, frac_rvt);
45a2c477 8637 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7a2a25ab
KG
8638 }
8639 break;
8640 default:
8641 gcc_unreachable ();
8642 }
b8698a0f 8643
7a2a25ab 8644 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8645 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7a2a25ab 8646 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8647 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7a2a25ab
KG
8648 }
8649
8650 return NULL_TREE;
8651}
8652
3d577eaf
KG
8653/* Fold a call to builtin modf. */
8654
8655static tree
db3927fb 8656fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
8657{
8658 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8659 return NULL_TREE;
b8698a0f 8660
3d577eaf 8661 STRIP_NOPS (arg0);
b8698a0f 8662
3d577eaf
KG
8663 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8664 return NULL_TREE;
b8698a0f 8665
db3927fb 8666 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
8667
8668 /* Proceed if a valid pointer type was passed in. */
8669 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8670 {
8671 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8672 REAL_VALUE_TYPE trunc, frac;
8673
8674 switch (value->cl)
8675 {
8676 case rvc_nan:
8677 case rvc_zero:
8678 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8679 trunc = frac = *value;
8680 break;
8681 case rvc_inf:
8682 /* For +-Inf, return (*arg1 = arg0, +-0). */
8683 frac = dconst0;
8684 frac.sign = value->sign;
8685 trunc = *value;
8686 break;
8687 case rvc_normal:
8688 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8689 real_trunc (&trunc, VOIDmode, value);
8690 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8691 /* If the original number was negative and already
8692 integral, then the fractional part is -0.0. */
8693 if (value->sign && frac.cl == rvc_zero)
8694 frac.sign = value->sign;
8695 break;
8696 }
b8698a0f 8697
3d577eaf 8698 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8699 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
8700 build_real (rettype, trunc));
8701 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8702 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
8703 build_real (rettype, frac));
8704 }
b8698a0f 8705
3d577eaf
KG
8706 return NULL_TREE;
8707}
8708
903c723b
TC
8709/* Given a location LOC, an interclass builtin function decl FNDECL
8710 and its single argument ARG, return an folded expression computing
8711 the same, or NULL_TREE if we either couldn't or didn't want to fold
8712 (the latter happen if there's an RTL instruction available). */
8713
8714static tree
8715fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8716{
8717 machine_mode mode;
8718
8719 if (!validate_arg (arg, REAL_TYPE))
8720 return NULL_TREE;
8721
8722 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8723 return NULL_TREE;
8724
8725 mode = TYPE_MODE (TREE_TYPE (arg));
8726
8727 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
02cf2861 8728
903c723b
TC
8729 /* If there is no optab, try generic code. */
8730 switch (DECL_FUNCTION_CODE (fndecl))
8731 {
8732 tree result;
44e10129 8733
903c723b
TC
8734 CASE_FLT_FN (BUILT_IN_ISINF):
8735 {
8736 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8737 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8738 tree type = TREE_TYPE (arg);
8739 REAL_VALUE_TYPE r;
8740 char buf[128];
8741
8742 if (is_ibm_extended)
8743 {
8744 /* NaN and Inf are encoded in the high-order double value
8745 only. The low-order value is not significant. */
8746 type = double_type_node;
8747 mode = DFmode;
8748 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8749 }
8750 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8751 real_from_string (&r, buf);
8752 result = build_call_expr (isgr_fn, 2,
8753 fold_build1_loc (loc, ABS_EXPR, type, arg),
8754 build_real (type, r));
8755 return result;
8756 }
8757 CASE_FLT_FN (BUILT_IN_FINITE):
8758 case BUILT_IN_ISFINITE:
8759 {
8760 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8761 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8762 tree type = TREE_TYPE (arg);
8763 REAL_VALUE_TYPE r;
8764 char buf[128];
8765
8766 if (is_ibm_extended)
8767 {
8768 /* NaN and Inf are encoded in the high-order double value
8769 only. The low-order value is not significant. */
8770 type = double_type_node;
8771 mode = DFmode;
8772 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8773 }
8774 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8775 real_from_string (&r, buf);
8776 result = build_call_expr (isle_fn, 2,
8777 fold_build1_loc (loc, ABS_EXPR, type, arg),
8778 build_real (type, r));
8779 /*result = fold_build2_loc (loc, UNGT_EXPR,
8780 TREE_TYPE (TREE_TYPE (fndecl)),
8781 fold_build1_loc (loc, ABS_EXPR, type, arg),
8782 build_real (type, r));
8783 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8784 TREE_TYPE (TREE_TYPE (fndecl)),
8785 result);*/
8786 return result;
8787 }
8788 case BUILT_IN_ISNORMAL:
8789 {
8790 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8791 islessequal(fabs(x),DBL_MAX). */
8792 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8793 tree type = TREE_TYPE (arg);
8794 tree orig_arg, max_exp, min_exp;
8795 machine_mode orig_mode = mode;
8796 REAL_VALUE_TYPE rmax, rmin;
8797 char buf[128];
8798
8799 orig_arg = arg = builtin_save_expr (arg);
8800 if (is_ibm_extended)
8801 {
8802 /* Use double to test the normal range of IBM extended
8803 precision. Emin for IBM extended precision is
8804 different to emin for IEEE double, being 53 higher
8805 since the low double exponent is at least 53 lower
8806 than the high double exponent. */
8807 type = double_type_node;
8808 mode = DFmode;
8809 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8810 }
8811 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8812
8813 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8814 real_from_string (&rmax, buf);
8815 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8816 real_from_string (&rmin, buf);
8817 max_exp = build_real (type, rmax);
8818 min_exp = build_real (type, rmin);
8819
8820 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8821 if (is_ibm_extended)
8822 {
8823 /* Testing the high end of the range is done just using
8824 the high double, using the same test as isfinite().
8825 For the subnormal end of the range we first test the
8826 high double, then if its magnitude is equal to the
8827 limit of 0x1p-969, we test whether the low double is
8828 non-zero and opposite sign to the high double. */
8829 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8830 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8831 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8832 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8833 arg, min_exp);
8834 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8835 complex_double_type_node, orig_arg);
8836 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8837 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8838 tree zero = build_real (type, dconst0);
8839 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8840 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8841 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8842 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8843 fold_build3 (COND_EXPR,
8844 integer_type_node,
8845 hilt, logt, lolt));
8846 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8847 eq_min, ok_lo);
8848 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8849 gt_min, eq_min);
8850 }
8851 else
8852 {
8853 tree const isge_fn
8854 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8855 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8856 }
8857 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8858 max_exp, min_exp);
8859 return result;
8860 }
8861 default:
8862 break;
8863 }
8864
8865 return NULL_TREE;
8866}
8867
8868/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
5039610b 8869 ARG is the argument for the call. */
64a9295a
PB
8870
8871static tree
903c723b 8872fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
64a9295a 8873{
903c723b
TC
8874 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8875
5039610b 8876 if (!validate_arg (arg, REAL_TYPE))
83322951 8877 return NULL_TREE;
64a9295a 8878
64a9295a
PB
8879 switch (builtin_index)
8880 {
903c723b
TC
8881 case BUILT_IN_ISINF:
8882 if (!HONOR_INFINITIES (arg))
8883 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8884
8885 return NULL_TREE;
8886
05f41289
KG
8887 case BUILT_IN_ISINF_SIGN:
8888 {
8889 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8890 /* In a boolean context, GCC will fold the inner COND_EXPR to
8891 1. So e.g. "if (isinf_sign(x))" would be folded to just
8892 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
72f52f30 8893 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
e79983f4 8894 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
05f41289
KG
8895 tree tmp = NULL_TREE;
8896
8897 arg = builtin_save_expr (arg);
8898
8899 if (signbit_fn && isinf_fn)
8900 {
db3927fb
AH
8901 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8902 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
05f41289 8903
db3927fb 8904 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8905 signbit_call, integer_zero_node);
db3927fb 8906 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8907 isinf_call, integer_zero_node);
b8698a0f 8908
db3927fb 8909 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
05f41289 8910 integer_minus_one_node, integer_one_node);
db3927fb
AH
8911 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8912 isinf_call, tmp,
05f41289
KG
8913 integer_zero_node);
8914 }
8915
8916 return tmp;
8917 }
8918
903c723b
TC
8919 case BUILT_IN_ISFINITE:
8920 if (!HONOR_NANS (arg)
8921 && !HONOR_INFINITIES (arg))
8922 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8923
8924 return NULL_TREE;
8925
8926 case BUILT_IN_ISNAN:
8927 if (!HONOR_NANS (arg))
8928 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8929
8930 {
8931 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8932 if (is_ibm_extended)
8933 {
8934 /* NaN and Inf are encoded in the high-order double value
8935 only. The low-order value is not significant. */
8936 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8937 }
8938 }
8939 arg = builtin_save_expr (arg);
8940 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8941
64a9295a 8942 default:
298e6adc 8943 gcc_unreachable ();
64a9295a
PB
8944 }
8945}
8946
903c723b
TC
8947/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8948 This builtin will generate code to return the appropriate floating
8949 point classification depending on the value of the floating point
8950 number passed in. The possible return values must be supplied as
8951 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8952 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8953 one floating point argument which is "type generic". */
8954
8955static tree
8956fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8957{
8958 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8959 arg, type, res, tmp;
8960 machine_mode mode;
8961 REAL_VALUE_TYPE r;
8962 char buf[128];
8963
8964 /* Verify the required arguments in the original call. */
8965 if (nargs != 6
8966 || !validate_arg (args[0], INTEGER_TYPE)
8967 || !validate_arg (args[1], INTEGER_TYPE)
8968 || !validate_arg (args[2], INTEGER_TYPE)
8969 || !validate_arg (args[3], INTEGER_TYPE)
8970 || !validate_arg (args[4], INTEGER_TYPE)
8971 || !validate_arg (args[5], REAL_TYPE))
8972 return NULL_TREE;
8973
8974 fp_nan = args[0];
8975 fp_infinite = args[1];
8976 fp_normal = args[2];
8977 fp_subnormal = args[3];
8978 fp_zero = args[4];
8979 arg = args[5];
8980 type = TREE_TYPE (arg);
8981 mode = TYPE_MODE (type);
8982 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8983
8984 /* fpclassify(x) ->
8985 isnan(x) ? FP_NAN :
8986 (fabs(x) == Inf ? FP_INFINITE :
8987 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8988 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8989
8990 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8991 build_real (type, dconst0));
8992 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8993 tmp, fp_zero, fp_subnormal);
8994
8995 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8996 real_from_string (&r, buf);
8997 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8998 arg, build_real (type, r));
8999 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9000
9001 if (HONOR_INFINITIES (mode))
9002 {
9003 real_inf (&r);
9004 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9005 build_real (type, r));
9006 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9007 fp_infinite, res);
9008 }
9009
9010 if (HONOR_NANS (mode))
9011 {
9012 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9013 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9014 }
9015
9016 return res;
9017}
9018
08039bd8 9019/* Fold a call to an unordered comparison function such as
a35da91f 9020 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
5039610b 9021 being called and ARG0 and ARG1 are the arguments for the call.
64a9295a
PB
9022 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9023 the opposite of the desired result. UNORDERED_CODE is used
9024 for modes that can hold NaNs and ORDERED_CODE is used for
9025 the rest. */
08039bd8
RS
9026
9027static tree
db3927fb 9028fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
08039bd8
RS
9029 enum tree_code unordered_code,
9030 enum tree_code ordered_code)
9031{
14f661f1 9032 tree type = TREE_TYPE (TREE_TYPE (fndecl));
08039bd8 9033 enum tree_code code;
1aeaea8d
GK
9034 tree type0, type1;
9035 enum tree_code code0, code1;
9036 tree cmp_type = NULL_TREE;
08039bd8 9037
1aeaea8d
GK
9038 type0 = TREE_TYPE (arg0);
9039 type1 = TREE_TYPE (arg1);
c22cacf3 9040
1aeaea8d
GK
9041 code0 = TREE_CODE (type0);
9042 code1 = TREE_CODE (type1);
c22cacf3 9043
1aeaea8d
GK
9044 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9045 /* Choose the wider of two real types. */
9046 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9047 ? type0 : type1;
9048 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9049 cmp_type = type0;
9050 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9051 cmp_type = type1;
c22cacf3 9052
db3927fb
AH
9053 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9054 arg1 = fold_convert_loc (loc, cmp_type, arg1);
14f661f1
RS
9055
9056 if (unordered_code == UNORDERED_EXPR)
9057 {
1b457aa4 9058 if (!HONOR_NANS (arg0))
db3927fb
AH
9059 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9060 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
14f661f1 9061 }
08039bd8 9062
1b457aa4 9063 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
db3927fb
AH
9064 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9065 fold_build2_loc (loc, code, type, arg0, arg1));
08039bd8
RS
9066}
9067
1304953e
JJ
9068/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9069 arithmetics if it can never overflow, or into internal functions that
9070 return both result of arithmetics and overflowed boolean flag in
44a845ca
MS
9071 a complex integer result, or some other check for overflow.
9072 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9073 checking part of that. */
1304953e
JJ
9074
9075static tree
9076fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9077 tree arg0, tree arg1, tree arg2)
9078{
9079 enum internal_fn ifn = IFN_LAST;
44a845ca
MS
9080 /* The code of the expression corresponding to the type-generic
9081 built-in, or ERROR_MARK for the type-specific ones. */
9082 enum tree_code opcode = ERROR_MARK;
9083 bool ovf_only = false;
9084
1304953e
JJ
9085 switch (fcode)
9086 {
44a845ca
MS
9087 case BUILT_IN_ADD_OVERFLOW_P:
9088 ovf_only = true;
9089 /* FALLTHRU */
1304953e 9090 case BUILT_IN_ADD_OVERFLOW:
44a845ca
MS
9091 opcode = PLUS_EXPR;
9092 /* FALLTHRU */
1304953e
JJ
9093 case BUILT_IN_SADD_OVERFLOW:
9094 case BUILT_IN_SADDL_OVERFLOW:
9095 case BUILT_IN_SADDLL_OVERFLOW:
9096 case BUILT_IN_UADD_OVERFLOW:
9097 case BUILT_IN_UADDL_OVERFLOW:
9098 case BUILT_IN_UADDLL_OVERFLOW:
9099 ifn = IFN_ADD_OVERFLOW;
9100 break;
44a845ca
MS
9101 case BUILT_IN_SUB_OVERFLOW_P:
9102 ovf_only = true;
9103 /* FALLTHRU */
1304953e 9104 case BUILT_IN_SUB_OVERFLOW:
44a845ca
MS
9105 opcode = MINUS_EXPR;
9106 /* FALLTHRU */
1304953e
JJ
9107 case BUILT_IN_SSUB_OVERFLOW:
9108 case BUILT_IN_SSUBL_OVERFLOW:
9109 case BUILT_IN_SSUBLL_OVERFLOW:
9110 case BUILT_IN_USUB_OVERFLOW:
9111 case BUILT_IN_USUBL_OVERFLOW:
9112 case BUILT_IN_USUBLL_OVERFLOW:
9113 ifn = IFN_SUB_OVERFLOW;
9114 break;
44a845ca
MS
9115 case BUILT_IN_MUL_OVERFLOW_P:
9116 ovf_only = true;
9117 /* FALLTHRU */
1304953e 9118 case BUILT_IN_MUL_OVERFLOW:
44a845ca
MS
9119 opcode = MULT_EXPR;
9120 /* FALLTHRU */
1304953e
JJ
9121 case BUILT_IN_SMUL_OVERFLOW:
9122 case BUILT_IN_SMULL_OVERFLOW:
9123 case BUILT_IN_SMULLL_OVERFLOW:
9124 case BUILT_IN_UMUL_OVERFLOW:
9125 case BUILT_IN_UMULL_OVERFLOW:
9126 case BUILT_IN_UMULLL_OVERFLOW:
9127 ifn = IFN_MUL_OVERFLOW;
9128 break;
9129 default:
9130 gcc_unreachable ();
9131 }
44a845ca
MS
9132
9133 /* For the "generic" overloads, the first two arguments can have different
9134 types and the last argument determines the target type to use to check
9135 for overflow. The arguments of the other overloads all have the same
9136 type. */
9137 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9138
9139 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9140 arguments are constant, attempt to fold the built-in call into a constant
9141 expression indicating whether or not it detected an overflow. */
9142 if (ovf_only
9143 && TREE_CODE (arg0) == INTEGER_CST
9144 && TREE_CODE (arg1) == INTEGER_CST)
9145 /* Perform the computation in the target type and check for overflow. */
9146 return omit_one_operand_loc (loc, boolean_type_node,
9147 arith_overflowed_p (opcode, type, arg0, arg1)
9148 ? boolean_true_node : boolean_false_node,
9149 arg2);
9150
1304953e
JJ
9151 tree ctype = build_complex_type (type);
9152 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9153 2, arg0, arg1);
9154 tree tgt = save_expr (call);
9155 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9156 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9157 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
44a845ca
MS
9158
9159 if (ovf_only)
9160 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9161
9162 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
1304953e
JJ
9163 tree store
9164 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9165 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9166}
9167
b25aad5f
MS
9168/* Fold a call to __builtin_FILE to a constant string. */
9169
9170static inline tree
9171fold_builtin_FILE (location_t loc)
9172{
9173 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
9174 {
9175 /* The documentation says this builtin is equivalent to the preprocessor
9176 __FILE__ macro so it appears appropriate to use the same file prefix
9177 mappings. */
9178 fname = remap_macro_filename (fname);
b25aad5f 9179 return build_string_literal (strlen (fname) + 1, fname);
7365279f 9180 }
b25aad5f
MS
9181
9182 return build_string_literal (1, "");
9183}
9184
9185/* Fold a call to __builtin_FUNCTION to a constant string. */
9186
9187static inline tree
9188fold_builtin_FUNCTION ()
9189{
f76b4224
NS
9190 const char *name = "";
9191
b25aad5f 9192 if (current_function_decl)
f76b4224 9193 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 9194
f76b4224 9195 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
9196}
9197
9198/* Fold a call to __builtin_LINE to an integer constant. */
9199
9200static inline tree
9201fold_builtin_LINE (location_t loc, tree type)
9202{
9203 return build_int_cst (type, LOCATION_LINE (loc));
9204}
9205
5039610b 9206/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 9207 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 9208
6de9cd9a 9209static tree
2625bb5d 9210fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 9211{
c0a47a61 9212 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 9213 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 9214 switch (fcode)
b0b3afb2 9215 {
b25aad5f
MS
9216 case BUILT_IN_FILE:
9217 return fold_builtin_FILE (loc);
9218
9219 case BUILT_IN_FUNCTION:
9220 return fold_builtin_FUNCTION ();
9221
9222 case BUILT_IN_LINE:
9223 return fold_builtin_LINE (loc, type);
9224
5039610b 9225 CASE_FLT_FN (BUILT_IN_INF):
6dc198e3 9226 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
5039610b
SL
9227 case BUILT_IN_INFD32:
9228 case BUILT_IN_INFD64:
9229 case BUILT_IN_INFD128:
db3927fb 9230 return fold_builtin_inf (loc, type, true);
d3147f64 9231
5039610b 9232 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
6dc198e3 9233 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
db3927fb 9234 return fold_builtin_inf (loc, type, false);
d3147f64 9235
5039610b
SL
9236 case BUILT_IN_CLASSIFY_TYPE:
9237 return fold_builtin_classify_type (NULL_TREE);
d3147f64 9238
5039610b
SL
9239 default:
9240 break;
9241 }
9242 return NULL_TREE;
9243}
d3147f64 9244
5039610b 9245/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
2625bb5d 9246 This function returns NULL_TREE if no simplification was possible. */
d3147f64 9247
5039610b 9248static tree
2625bb5d 9249fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
5039610b
SL
9250{
9251 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9252 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5c1a2e63
RS
9253
9254 if (TREE_CODE (arg0) == ERROR_MARK)
9255 return NULL_TREE;
9256
d7ebef06 9257 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
5c1a2e63
RS
9258 return ret;
9259
5039610b
SL
9260 switch (fcode)
9261 {
b0b3afb2 9262 case BUILT_IN_CONSTANT_P:
d3147f64 9263 {
5039610b 9264 tree val = fold_builtin_constant_p (arg0);
d3147f64 9265
d3147f64
EC
9266 /* Gimplification will pull the CALL_EXPR for the builtin out of
9267 an if condition. When not optimizing, we'll not CSE it back.
9268 To avoid link error types of regressions, return false now. */
9269 if (!val && !optimize)
9270 val = integer_zero_node;
9271
9272 return val;
9273 }
b0b3afb2 9274
ad82abb8 9275 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9276 return fold_builtin_classify_type (arg0);
ad82abb8 9277
b0b3afb2 9278 case BUILT_IN_STRLEN:
ab996409 9279 return fold_builtin_strlen (loc, type, arg0);
b0b3afb2 9280
ea6a6627 9281 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 9282 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
9283 case BUILT_IN_FABSD32:
9284 case BUILT_IN_FABSD64:
9285 case BUILT_IN_FABSD128:
db3927fb 9286 return fold_builtin_fabs (loc, arg0, type);
9655d83b
RS
9287
9288 case BUILT_IN_ABS:
9289 case BUILT_IN_LABS:
9290 case BUILT_IN_LLABS:
9291 case BUILT_IN_IMAXABS:
db3927fb 9292 return fold_builtin_abs (loc, arg0, type);
07bae5ad 9293
ea6a6627 9294 CASE_FLT_FN (BUILT_IN_CONJ):
c128599a 9295 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9296 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9297 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
5039610b 9298 break;
aa6c7c3a 9299
ea6a6627 9300 CASE_FLT_FN (BUILT_IN_CREAL):
c128599a 9301 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9302 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
6f3d1a5e 9303 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
5039610b 9304 break;
aa6c7c3a 9305
ea6a6627 9306 CASE_FLT_FN (BUILT_IN_CIMAG):
376da68e
KG
9307 if (validate_arg (arg0, COMPLEX_TYPE)
9308 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9309 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
5039610b 9310 break;
aa6c7c3a 9311
5c1a2e63
RS
9312 CASE_FLT_FN (BUILT_IN_CARG):
9313 return fold_builtin_carg (loc, arg0, type);
43272bf5 9314
5c1a2e63
RS
9315 case BUILT_IN_ISASCII:
9316 return fold_builtin_isascii (loc, arg0);
b8698a0f 9317
5c1a2e63
RS
9318 case BUILT_IN_TOASCII:
9319 return fold_builtin_toascii (loc, arg0);
b8698a0f 9320
5c1a2e63
RS
9321 case BUILT_IN_ISDIGIT:
9322 return fold_builtin_isdigit (loc, arg0);
b8698a0f 9323
903c723b
TC
9324 CASE_FLT_FN (BUILT_IN_FINITE):
9325 case BUILT_IN_FINITED32:
9326 case BUILT_IN_FINITED64:
9327 case BUILT_IN_FINITED128:
9328 case BUILT_IN_ISFINITE:
9329 {
9330 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9331 if (ret)
9332 return ret;
9333 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9334 }
9335
9336 CASE_FLT_FN (BUILT_IN_ISINF):
9337 case BUILT_IN_ISINFD32:
9338 case BUILT_IN_ISINFD64:
9339 case BUILT_IN_ISINFD128:
9340 {
9341 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9342 if (ret)
9343 return ret;
9344 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9345 }
9346
9347 case BUILT_IN_ISNORMAL:
9348 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9349
5c1a2e63 9350 case BUILT_IN_ISINF_SIGN:
903c723b
TC
9351 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9352
9353 CASE_FLT_FN (BUILT_IN_ISNAN):
9354 case BUILT_IN_ISNAND32:
9355 case BUILT_IN_ISNAND64:
9356 case BUILT_IN_ISNAND128:
9357 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
b8698a0f 9358
5c1a2e63
RS
9359 case BUILT_IN_FREE:
9360 if (integer_zerop (arg0))
9361 return build_empty_stmt (loc);
abcc43f5 9362 break;
07bae5ad 9363
5c1a2e63 9364 default:
4835c978 9365 break;
5c1a2e63 9366 }
4977bab6 9367
5c1a2e63 9368 return NULL_TREE;
e19f6bde 9369
5c1a2e63 9370}
b53fed56 9371
5c1a2e63
RS
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Bail out on erroneous arguments; downstream folders do not expect
     ERROR_MARK nodes.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant via the generic constant folder.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The is{greater,less,...} family folds to unordered comparisons;
       the first tree code is used when the comparison can raise an
       exception on unordered operands, the second when it cannot.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9453
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Bail out on erroneous arguments; downstream folders do not expect
     ERROR_MARK nodes.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant via the generic constant folder.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    /* Three-argument __builtin_expect carries the probability in ARG2
       for BUILT_IN_EXPECT_WITH_PROBABILITY, and a predictor name tree
       for plain BUILT_IN_EXPECT.  */
    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
b0b3afb2 9527
5039610b 9528/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
a6a0570f
RB
9529 arguments. IGNORE is true if the result of the
9530 function call is ignored. This function returns NULL_TREE if no
9531 simplification was possible. */
b8698a0f 9532
3d2cf79f 9533tree
2625bb5d 9534fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
5039610b
SL
9535{
9536 tree ret = NULL_TREE;
f4577fcd 9537
5039610b
SL
9538 switch (nargs)
9539 {
9540 case 0:
2625bb5d 9541 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
9542 break;
9543 case 1:
2625bb5d 9544 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
9545 break;
9546 case 2:
2625bb5d 9547 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
5039610b
SL
9548 break;
9549 case 3:
2625bb5d 9550 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 9551 break;
5039610b 9552 default:
903c723b 9553 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
9554 break;
9555 }
9556 if (ret)
9557 {
726a989a 9558 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 9559 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
9560 return ret;
9561 }
9562 return NULL_TREE;
9563}
9564
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* New arguments come first, followed by the retained tail of the
	 original argument list.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* No new arguments: reuse the original array in place, skipping the
       first SKIP entries, instead of copying.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
5039610b 9592
0889e9bc
JJ
9593/* Return true if FNDECL shouldn't be folded right now.
9594 If a built-in function has an inline attribute always_inline
9595 wrapper, defer folding it after always_inline functions have
9596 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9597 might not be performed. */
9598
e7f9dae0 9599bool
0889e9bc
JJ
9600avoid_folding_inline_builtin (tree fndecl)
9601{
9602 return (DECL_DECLARED_INLINE_P (fndecl)
9603 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9604 && cfun
9605 && !cfun->always_inline_functions_inlined
9606 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9607}
9608
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* Folding an always_inline _FORTIFY_SOURCE wrapper too early
	 would skip its checking; defer (see avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-dependent builtins are folded by the target hook.  */
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
b8698a0f 9652
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through the address of a FUNCTION_DECL can be a
     builtin we know how to fold.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      /* Defer always_inline wrappers (e.g. _FORTIFY_SOURCE) until after
	 inlining.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9688
43ea30dc
NF
9689/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9690 along with N new arguments specified as the "..." parameters. SKIP
9691 is the number of arguments in EXP to be omitted. This function is used
9692 to do varargs-to-varargs transformations. */
9693
9694static tree
9695rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9696{
9697 va_list ap;
9698 tree t;
9699
9700 va_start (ap, n);
9701 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9702 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9703 va_end (ap);
5039610b 9704
43ea30dc 9705 return t;
5039610b
SL
9706}
9707
9708/* Validate a single argument ARG against a tree code CODE representing
0dba7960 9709 a type. Return true when argument is valid. */
b8698a0f 9710
5039610b 9711static bool
0dba7960 9712validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
9713{
9714 if (!arg)
9715 return false;
9716 else if (code == POINTER_TYPE)
0dba7960 9717 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
9718 else if (code == INTEGER_TYPE)
9719 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 9720 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 9721}
019fa094 9722
726a989a
RB
9723/* This function validates the types of a function call argument list
9724 against a specified list of tree_codes. If the last specifier is a 0,
9725 that represents an ellipses, otherwise the last specifier must be a
9726 VOID_TYPE.
9727
9728 This is the GIMPLE version of validate_arglist. Eventually we want to
9729 completely convert builtins.c to work from GIMPLEs and the tree based
9730 validate_arglist will then be removed. */
9731
9732bool
538dd0b7 9733validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
9734{
9735 enum tree_code code;
9736 bool res = 0;
9737 va_list ap;
9738 const_tree arg;
9739 size_t i;
9740
9741 va_start (ap, call);
9742 i = 0;
9743
9744 do
9745 {
72b5577d 9746 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
9747 switch (code)
9748 {
9749 case 0:
9750 /* This signifies an ellipses, any further arguments are all ok. */
9751 res = true;
9752 goto end;
9753 case VOID_TYPE:
9754 /* This signifies an endlink, if no arguments remain, return
9755 true, otherwise return false. */
9756 res = (i == gimple_call_num_args (call));
9757 goto end;
9758 default:
9759 /* If no parameters remain or the parameter's code does not
9760 match the specified code, return false. Otherwise continue
9761 checking any remaining arguments. */
9762 arg = gimple_call_arg (call, i++);
9763 if (!validate_arg (arg, code))
9764 goto end;
9765 break;
9766 }
9767 }
9768 while (1);
9769
9770 /* We need gotos here since we can only have one VA_CLOSE in a
9771 function. */
9772 end: ;
9773 va_end (ap);
9774
9775 return res;
9776}
9777
/* Default target-specific builtin expander that does nothing.  Targets
   without md builtins use this as the TARGET_EXPAND_BUILTIN hook;
   returning NULL_RTX tells the caller no expansion happened.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
34ee7f82 9789
7dc61d6c
KG
9790/* Returns true is EXP represents data that would potentially reside
9791 in a readonly section. */
9792
fef5a0d9 9793bool
7dc61d6c
KG
9794readonly_data_expr (tree exp)
9795{
9796 STRIP_NOPS (exp);
9797
aef0afc4
UW
9798 if (TREE_CODE (exp) != ADDR_EXPR)
9799 return false;
9800
9801 exp = get_base_address (TREE_OPERAND (exp, 0));
9802 if (!exp)
9803 return false;
9804
9805 /* Make sure we call decl_readonly_section only for trees it
9806 can handle (since it returns true for everything it doesn't
9807 understand). */
caf93cb0 9808 if (TREE_CODE (exp) == STRING_CST
aef0afc4 9809 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 9810 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 9811 return decl_readonly_section (exp, 0);
7dc61d6c
KG
9812 else
9813 return false;
9814}
6de9cd9a 9815
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a known constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings are constant: evaluate with the host strpbrk at
	     compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
9881
5039610b
SL
9882/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9883 to the call.
6de9cd9a 9884
5039610b 9885 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
9886 simplified form of the call as a tree.
9887
9888 The simplified form may be a constant or other expression which
9889 computes the same value, but in a more efficient manner (including
9890 calls to other builtin functions).
9891
9892 The call may contain arguments which need to be evaluated, but
9893 which are not useful to determine the result of the call. In
9894 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9895 COMPOUND_EXPR will be an argument which must be evaluated.
9896 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9897 COMPOUND_EXPR in the chain will contain the tree for the simplified
9898 form of the builtin function call. */
9899
9900static tree
db3927fb 9901fold_builtin_strspn (location_t loc, tree s1, tree s2)
6de9cd9a 9902{
5039610b
SL
9903 if (!validate_arg (s1, POINTER_TYPE)
9904 || !validate_arg (s2, POINTER_TYPE))
9905 return NULL_TREE;
6de9cd9a
DN
9906 else
9907 {
6de9cd9a
DN
9908 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9909
5039610b 9910 /* If either argument is "", return NULL_TREE. */
6de9cd9a 9911 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
08039bd8
RS
9912 /* Evaluate and ignore both arguments in case either one has
9913 side-effects. */
db3927fb 9914 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 9915 s1, s2);
5039610b 9916 return NULL_TREE;
6de9cd9a
DN
9917 }
9918}
9919
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", the result is 0.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
9972
5039610b 9973/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
9974 produced. False otherwise. This is done so that we don't output the error
9975 or warning twice or three times. */
726a989a 9976
2efcfa4e 9977bool
5039610b 9978fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
9979{
9980 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
9981 int nargs = call_expr_nargs (exp);
9982 tree arg;
34c88790
DS
9983 /* There is good chance the current input_location points inside the
9984 definition of the va_start macro (perhaps on the token for
9985 builtin) in a system header, so warnings will not be emitted.
9986 Use the location in real source code. */
9987 source_location current_location =
9988 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9989 NULL);
6de9cd9a 9990
f38958e8 9991 if (!stdarg_p (fntype))
2efcfa4e
AP
9992 {
9993 error ("%<va_start%> used in function with fixed args");
9994 return true;
9995 }
5039610b
SL
9996
9997 if (va_start_p)
8870e212 9998 {
5039610b
SL
9999 if (va_start_p && (nargs != 2))
10000 {
10001 error ("wrong number of arguments to function %<va_start%>");
10002 return true;
10003 }
10004 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
10005 }
10006 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10007 when we checked the arguments and if needed issued a warning. */
5039610b 10008 else
6de9cd9a 10009 {
5039610b
SL
10010 if (nargs == 0)
10011 {
10012 /* Evidently an out of date version of <stdarg.h>; can't validate
10013 va_start's second argument, but can still work as intended. */
34c88790 10014 warning_at (current_location,
b9c8da34
DS
10015 OPT_Wvarargs,
10016 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
10017 return true;
10018 }
10019 else if (nargs > 1)
c22cacf3 10020 {
5039610b 10021 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
10022 return true;
10023 }
5039610b
SL
10024 arg = CALL_EXPR_ARG (exp, 0);
10025 }
10026
4e3825db
MM
10027 if (TREE_CODE (arg) == SSA_NAME)
10028 arg = SSA_NAME_VAR (arg);
10029
5039610b 10030 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 10031 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
10032 the arguments and if needed issuing a warning. */
10033 if (!integer_zerop (arg))
10034 {
10035 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 10036
6de9cd9a
DN
10037 /* Strip off all nops for the sake of the comparison. This
10038 is not quite the same as STRIP_NOPS. It does more.
10039 We must also strip off INDIRECT_EXPR for C++ reference
10040 parameters. */
1043771b 10041 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
10042 || TREE_CODE (arg) == INDIRECT_REF)
10043 arg = TREE_OPERAND (arg, 0);
10044 if (arg != last_parm)
c22cacf3 10045 {
118f3b19
KH
10046 /* FIXME: Sometimes with the tree optimizers we can get the
10047 not the last argument even though the user used the last
10048 argument. We just warn and set the arg to be the last
10049 argument so that we will get wrong-code because of
10050 it. */
34c88790 10051 warning_at (current_location,
b9c8da34 10052 OPT_Wvarargs,
34c88790 10053 "second parameter of %<va_start%> not last named argument");
2efcfa4e 10054 }
2985f531
MLI
10055
10056 /* Undefined by C99 7.15.1.4p4 (va_start):
10057 "If the parameter parmN is declared with the register storage
10058 class, with a function or array type, or with a type that is
10059 not compatible with the type that results after application of
10060 the default argument promotions, the behavior is undefined."
10061 */
10062 else if (DECL_REGISTER (arg))
34c88790
DS
10063 {
10064 warning_at (current_location,
b9c8da34 10065 OPT_Wvarargs,
9c582551 10066 "undefined behavior when second parameter of "
34c88790
DS
10067 "%<va_start%> is declared with %<register%> storage");
10068 }
2985f531 10069
8870e212 10070 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
10071 optimizers are run and then avoid keeping it in the tree,
10072 as otherwise we could warn even for correct code like:
10073 void foo (int i, ...)
10074 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
10075 if (va_start_p)
10076 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10077 else
10078 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
10079 }
10080 return false;
6de9cd9a
DN
10081}
10082
10083
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument selects one of four object-size modes and must
     be a literal 0..3.  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  /* Nothing could be computed earlier, so return the "unknown" value:
     (size_t) -1 for maximum modes (0 and 1), 0 for minimum modes
     (2 and 3).  */
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
10118
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose out-of-bounds accesses now; the result also gates the
     transformation to the unchecked variant below.  */
  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
				/*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Rewrite to the unchecked variant, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
10235
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call expression for one of the *_chk builtins and FCODE
   its BUILT_IN_* code; the relevant arguments are picked out per
   builtin and forwarded to check_access for the actual diagnostics.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
10308
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the CALL_EXPR for
   the call and FCODE distinguishes the two built-ins (only
   BUILT_IN_SPRINTF_CHK can have its "%s" argument folded here).  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  /* Argument 2 is the object size, argument 3 the format string.  */
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* Give up when the object size is not a compile-time constant or is
     the "unknown size" sentinel (all ones).  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  /* target_percent / target_percent_s below require the target
     charset tables to be initialized.  */
  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  /* LEN is the number of bytes the call will write; warn if it
     exceeds SIZE.  */
  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}
10366
f9555f40
JJ
10367/* Emit warning if a free is called with address of a variable. */
10368
10369static void
10370maybe_emit_free_warning (tree exp)
10371{
10372 tree arg = CALL_EXPR_ARG (exp, 0);
10373
10374 STRIP_NOPS (arg);
10375 if (TREE_CODE (arg) != ADDR_EXPR)
10376 return;
10377
10378 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 10379 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
10380 return;
10381
10382 if (SSA_VAR_P (arg))
a3a704a4
MH
10383 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10384 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 10385 else
a3a704a4
MH
10386 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10387 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
10388}
10389
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  OST is the object-size-type argument (0 through 3)
   selecting maximum/minimum and whole-object/closest-subobject
   semantics.  Return the folded size tree or NULL_TREE.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The object-size type must be a constant in the range [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      /* Only fold when the computed size fits size_t.  */
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
10436
903c723b
TC
10437/* Builtins with folding operations that operate on "..." arguments
10438 need special handling; we need to store the arguments in a convenient
10439 data structure before attempting any folding. Fortunately there are
10440 only a few builtins that fall into this category. FNDECL is the
10441 function, EXP is the CALL_EXPR for the call. */
10442
10443static tree
10444fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10445{
10446 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10447 tree ret = NULL_TREE;
10448
10449 switch (fcode)
10450 {
10451 case BUILT_IN_FPCLASSIFY:
10452 ret = fold_builtin_fpclassify (loc, args, nargs);
10453 break;
10454
10455 default:
10456 break;
10457 }
10458 if (ret)
10459 {
10460 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10461 SET_EXPR_LOCATION (ret, loc);
10462 TREE_NO_WARNING (ret) = 1;
10463 return ret;
10464 }
10465 return NULL_TREE;
10466}
10467
000ba23d
KG
10468/* Initialize format string characters in the target charset. */
10469
fef5a0d9 10470bool
000ba23d
KG
10471init_target_chars (void)
10472{
10473 static bool init;
10474 if (!init)
10475 {
10476 target_newline = lang_hooks.to_target_charset ('\n');
10477 target_percent = lang_hooks.to_target_charset ('%');
10478 target_c = lang_hooks.to_target_charset ('c');
10479 target_s = lang_hooks.to_target_charset ('s');
10480 if (target_newline == 0 || target_percent == 0 || target_c == 0
10481 || target_s == 0)
10482 return false;
10483
10484 target_percent_c[0] = target_percent;
10485 target_percent_c[1] = target_c;
10486 target_percent_c[2] = '\0';
10487
10488 target_percent_s[0] = target_percent;
10489 target_percent_s[1] = target_s;
10490 target_percent_s[2] = '\0';
10491
10492 target_percent_s_newline[0] = target_percent;
10493 target_percent_s_newline[1] = target_s;
10494 target_percent_s_newline[2] = target_newline;
10495 target_percent_s_newline[3] = '\0';
c22cacf3 10496
000ba23d
KG
10497 init = true;
10498 }
10499 return true;
10500}
1f3f1f68 10501
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value, i.e. the
	     round trip through TYPE's mode lost nothing.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10538
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its element type drives the conversion
	 of each part.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpft_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
c128599a 10585
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
	     set by the computation below.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer, where lgamma has a pole.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear flags so do_mpfr_ckconv can detect overflow/underflow
	     set by the computation below.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 10724
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Both
     arguments must be complex constants with REAL_TYPE elements.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear flags so do_mpc_ckconv can detect overflow/underflow
	     set by FUNC below.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 10783
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the
   GIMPLE call; IGNORE is true when the call's value is unused.
   Returns the folded replacement expression or NULL_TREE.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls carrying __builtin_va_arg_pack () must not be folded.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP wrapper added by the folders.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  /* ffs is special: when int is narrower than a word the expander may
     fall back to a library call, so redirect the libfunc too.  */
  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
bec922f0
SL
10855
10856/* Return true if DECL is a builtin that expands to a constant or similarly
10857 simple code. */
10858bool
10859is_simple_builtin (tree decl)
10860{
3d78e008 10861 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
bec922f0
SL
10862 switch (DECL_FUNCTION_CODE (decl))
10863 {
10864 /* Builtins that expand to constants. */
10865 case BUILT_IN_CONSTANT_P:
10866 case BUILT_IN_EXPECT:
10867 case BUILT_IN_OBJECT_SIZE:
10868 case BUILT_IN_UNREACHABLE:
10869 /* Simple register moves or loads from stack. */
45d439ac 10870 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
10871 case BUILT_IN_RETURN_ADDRESS:
10872 case BUILT_IN_EXTRACT_RETURN_ADDR:
10873 case BUILT_IN_FROB_RETURN_ADDR:
10874 case BUILT_IN_RETURN:
10875 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10876 case BUILT_IN_FRAME_ADDRESS:
10877 case BUILT_IN_VA_END:
10878 case BUILT_IN_STACK_SAVE:
10879 case BUILT_IN_STACK_RESTORE:
10880 /* Exception state returns or moves registers around. */
10881 case BUILT_IN_EH_FILTER:
10882 case BUILT_IN_EH_POINTER:
10883 case BUILT_IN_EH_COPY_VALUES:
10884 return true;
10885
10886 default:
10887 return false;
10888 }
10889
10890 return false;
10891}
10892
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  /* All machine-dependent builtins are considered cheap.  */
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* Arithmetic/bit-twiddling builtins that expand to a few insns.  */
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      /* Floating-point classification builtins.  */
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      /* Varargs bookkeeping and miscellaneous cheap builtins.  */
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
488c6247
ML
10977
10978/* Return true if T is a constant and the value cast to a target char
10979 can be represented by a host char.
10980 Store the casted char constant in *P if so. */
10981
10982bool
10983target_char_cst_p (tree t, char *p)
10984{
10985 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10986 return false;
10987
10988 *p = (char)tree_to_uhwi (t);
10989 return true;
10990}
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* Objects larger than PTRDIFF_MAX cannot be handled portably, since
     pointer subtraction within them would overflow ptrdiff_t.
     To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}