]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
Update copyright years.
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
/* Expand builtin functions.
   Copyright (C) 1988-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5
AM
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5
AM
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
957060b5 42#include "diagnostic-core.h"
40e23961 43#include "alias.h"
40e23961 44#include "fold-const.h"
5c1a2e63 45#include "fold-const-call.h"
cc8bea0a 46#include "gimple-ssa-warn-restrict.h"
d8a2d370
DN
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
ef29b12c 51#include "tree-ssa-strlen.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
44a845ca 70#include "gimple-fold.h"
ee92e7ba 71#include "intl.h"
7365279f 72#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
73#include "gomp-constants.h"
74#include "omp-general.h"
464969eb 75#include "tree-dfa.h"
dce6c58d 76#include "gimple-iterator.h"
410675cb
JJ
77#include "gimple-ssa.h"
78#include "tree-ssa-live.h"
79#include "tree-outof-ssa.h"
4f8cfb42 80#include "attr-fnspec.h"
81f5094d 81
fa19795e
RS
82struct target_builtins default_target_builtins;
83#if SWITCHABLE_TARGET
84struct target_builtins *this_target_builtins = &default_target_builtins;
85#endif
86
9df2c88c 87/* Define the names of the builtin function types and codes. */
5e351e96 88const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
89 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
90
c6a912da 91#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 92const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
93{
94#include "builtins.def"
95};
9df2c88c 96
cbf5d0e7 97/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 98 initialized to NULL_TREE. */
cbf5d0e7 99builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 100
4e7d7b3d
JJ
101/* Non-zero if __builtin_constant_p should be folded right away. */
102bool force_folding_builtin_constant_p;
103
4682ae04 104static int target_char_cast (tree, char *);
435bb2a1 105static rtx get_memory_rtx (tree, tree);
4682ae04
AJ
106static int apply_args_size (void);
107static int apply_result_size (void);
4682ae04 108static rtx result_vector (int, rtx);
4682ae04
AJ
109static void expand_builtin_prefetch (tree);
110static rtx expand_builtin_apply_args (void);
111static rtx expand_builtin_apply_args_1 (void);
112static rtx expand_builtin_apply (rtx, rtx, rtx);
113static void expand_builtin_return (rtx);
114static enum type_class type_to_class (tree);
115static rtx expand_builtin_classify_type (tree);
6c7cf1f0 116static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 117static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 118static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 119static rtx expand_builtin_sincos (tree);
4359dc2a 120static rtx expand_builtin_cexpi (tree, rtx);
1856c8dc
JH
121static rtx expand_builtin_int_roundingfn (tree, rtx);
122static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 123static rtx expand_builtin_next_arg (void);
4682ae04
AJ
124static rtx expand_builtin_va_start (tree);
125static rtx expand_builtin_va_end (tree);
126static rtx expand_builtin_va_copy (tree);
d5803b98 127static rtx inline_expand_builtin_bytecmp (tree, rtx);
44e10129 128static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 129static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
095a2d76 130static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
d9c5a8b9 131static rtx expand_builtin_memchr (tree, rtx);
44e10129 132static rtx expand_builtin_memcpy (tree, rtx);
671a00ee 133static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
2ff5ffb6 134 rtx target, tree exp,
03a9b90a
AS
135 memop_ret retmode,
136 bool might_overlap);
e50d56a5 137static rtx expand_builtin_memmove (tree, rtx);
671a00ee 138static rtx expand_builtin_mempcpy (tree, rtx);
2ff5ffb6 139static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
b5338fb3 140static rtx expand_builtin_strcat (tree);
44e10129 141static rtx expand_builtin_strcpy (tree, rtx);
e08341bb 142static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
ef4bddc2 143static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
e50d56a5 144static rtx expand_builtin_stpncpy (tree, rtx);
ee92e7ba 145static rtx expand_builtin_strncat (tree, rtx);
44e10129 146static rtx expand_builtin_strncpy (tree, rtx);
095a2d76 147static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
ef4bddc2
RS
148static rtx expand_builtin_memset (tree, rtx, machine_mode);
149static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 150static rtx expand_builtin_bzero (tree);
ef4bddc2 151static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 152static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 153static rtx expand_builtin_alloca (tree);
ef4bddc2 154static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 155static rtx expand_builtin_frame_address (tree, tree);
db3927fb 156static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 157static rtx expand_builtin_expect (tree, rtx);
1e9168b2 158static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
159static tree fold_builtin_constant_p (tree);
160static tree fold_builtin_classify_type (tree);
d14c547a 161static tree fold_builtin_strlen (location_t, tree, tree, tree);
db3927fb 162static tree fold_builtin_inf (location_t, tree, int);
db3927fb 163static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 164static bool validate_arg (const_tree, enum tree_code code);
4682ae04 165static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 166static rtx expand_builtin_signbit (tree, rtx);
db3927fb 167static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
168static tree fold_builtin_isascii (location_t, tree);
169static tree fold_builtin_toascii (location_t, tree);
170static tree fold_builtin_isdigit (location_t, tree);
171static tree fold_builtin_fabs (location_t, tree, tree);
172static tree fold_builtin_abs (location_t, tree, tree);
173static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 174 enum tree_code);
903c723b 175static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb 176
b5338fb3
MS
177static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
178static tree fold_builtin_strspn (location_t, tree, tree, tree);
179static tree fold_builtin_strcspn (location_t, tree, tree, tree);
6de9cd9a 180
10a0d495 181static rtx expand_builtin_object_size (tree);
ef4bddc2 182static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
183 enum built_in_function);
184static void maybe_emit_chk_warning (tree, enum built_in_function);
185static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
5039610b 186static tree fold_builtin_object_size (tree, tree);
d14c547a 187static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
d02c41dd
MS
188static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
189 pointer_query *);
000ba23d 190
ad03a744 191unsigned HOST_WIDE_INT target_newline;
fef5a0d9 192unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
193static unsigned HOST_WIDE_INT target_c;
194static unsigned HOST_WIDE_INT target_s;
edd7ae68 195char target_percent_c[3];
fef5a0d9 196char target_percent_s[3];
ad03a744 197char target_percent_s_newline[4];
ea91f957 198static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 199static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 200static void expand_builtin_sync_synchronize (void);
10a0d495 201
d14c547a
MS
202access_ref::access_ref (tree bound /* = NULL_TREE */,
203 bool minaccess /* = false */)
dce6c58d
MS
204: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
205 base0 (true), parmarray ()
d14c547a
MS
206{
207 /* Set to valid. */
208 offrng[0] = offrng[1] = 0;
209 /* Invalidate. */
210 sizrng[0] = sizrng[1] = -1;
211
212 /* Set the default bounds of the access and adjust below. */
213 bndrng[0] = minaccess ? 1 : 0;
214 bndrng[1] = HOST_WIDE_INT_M1U;
215
216 /* When BOUND is nonnull and a range can be extracted from it,
217 set the bounds of the access to reflect both it and MINACCESS.
218 BNDRNG[0] is the size of the minimum access. */
6ccadc4c 219 tree rng[2];
83685efd 220 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
6ccadc4c
MS
221 {
222 bndrng[0] = wi::to_offset (rng[0]);
223 bndrng[1] = wi::to_offset (rng[1]);
224 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
225 }
d14c547a
MS
226}
227
eafe8ee7
MS
228/* Return the PHI node REF refers to or null if it doesn't. */
229
230gphi *
231access_ref::phi () const
232{
233 if (!ref || TREE_CODE (ref) != SSA_NAME)
234 return NULL;
235
236 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
237 if (gimple_code (def_stmt) != GIMPLE_PHI)
238 return NULL;
239
240 return as_a <gphi *> (def_stmt);
241}
242
243/* Determine and return the largest object to which *THIS. If *THIS
244 refers to a PHI and PREF is nonnull, fill *PREF with the details
245 of the object determined by compute_objsize(ARG, OSTYPE) for each
246 PHI argument ARG. */
247
248tree
249access_ref::get_ref (vec<access_ref> *all_refs,
250 access_ref *pref /* = NULL */,
251 int ostype /* = 1 */,
252 ssa_name_limit_t *psnlim /* = NULL */,
d02c41dd 253 pointer_query *qry /* = NULL */) const
eafe8ee7
MS
254{
255 gphi *phi_stmt = this->phi ();
256 if (!phi_stmt)
257 return ref;
258
259 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
260 cause unbounded recursion. */
261 ssa_name_limit_t snlim_buf;
262 if (!psnlim)
263 psnlim = &snlim_buf;
264
265 if (!psnlim->visit_phi (ref))
266 return NULL_TREE;
267
268 /* Reflects the range of offsets of all PHI arguments refer to the same
269 object (i.e., have the same REF). */
270 access_ref same_ref;
271 /* The conservative result of the PHI reflecting the offset and size
272 of the largest PHI argument, regardless of whether or not they all
273 refer to the same object. */
d02c41dd
MS
274 pointer_query empty_qry;
275 if (!qry)
276 qry = &empty_qry;
277
eafe8ee7
MS
278 access_ref phi_ref;
279 if (pref)
280 {
281 phi_ref = *pref;
282 same_ref = *pref;
283 }
284
285 /* Set if any argument is a function array (or VLA) parameter not
286 declared [static]. */
287 bool parmarray = false;
288 /* The size of the smallest object referenced by the PHI arguments. */
289 offset_int minsize = 0;
290 const offset_int maxobjsize = wi::to_offset (max_object_size ());
291 /* The offset of the PHI, not reflecting those of its arguments. */
292 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
293
294 const unsigned nargs = gimple_phi_num_args (phi_stmt);
295 for (unsigned i = 0; i < nargs; ++i)
296 {
297 access_ref phi_arg_ref;
298 tree arg = gimple_phi_arg_def (phi_stmt, i);
d02c41dd 299 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
eafe8ee7
MS
300 || phi_arg_ref.sizrng[0] < 0)
301 /* A PHI with all null pointer arguments. */
302 return NULL_TREE;
303
304 /* Add PREF's offset to that of the argument. */
305 phi_arg_ref.add_offset (orng[0], orng[1]);
d02c41dd
MS
306 if (TREE_CODE (arg) == SSA_NAME)
307 qry->put_ref (arg, phi_arg_ref);
eafe8ee7
MS
308
309 if (all_refs)
310 all_refs->safe_push (phi_arg_ref);
311
312 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
313 || phi_arg_ref.sizrng[1] != maxobjsize);
314
315 parmarray |= phi_arg_ref.parmarray;
316
317 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
318
319 if (phi_ref.sizrng[0] < 0)
320 {
321 if (!nullp)
322 same_ref = phi_arg_ref;
323 phi_ref = phi_arg_ref;
324 if (arg_known_size)
325 minsize = phi_arg_ref.sizrng[0];
326 continue;
327 }
328
329 const bool phi_known_size = (phi_ref.sizrng[0] != 0
330 || phi_ref.sizrng[1] != maxobjsize);
331
332 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
333 minsize = phi_arg_ref.sizrng[0];
334
335 /* Disregard null pointers in PHIs with two or more arguments.
336 TODO: Handle this better! */
337 if (nullp)
338 continue;
339
340 /* Determine the amount of remaining space in the argument. */
341 offset_int argrem[2];
342 argrem[1] = phi_arg_ref.size_remaining (argrem);
343
344 /* Determine the amount of remaining space computed so far and
345 if the remaining space in the argument is more use it instead. */
346 offset_int phirem[2];
347 phirem[1] = phi_ref.size_remaining (phirem);
348
349 if (phi_arg_ref.ref != same_ref.ref)
350 same_ref.ref = NULL_TREE;
351
352 if (phirem[1] < argrem[1]
353 || (phirem[1] == argrem[1]
354 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
355 /* Use the argument with the most space remaining as the result,
356 or the larger one if the space is equal. */
357 phi_ref = phi_arg_ref;
358
359 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
360 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
361 same_ref.offrng[0] = phi_arg_ref.offrng[0];
362 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
363 same_ref.offrng[1] = phi_arg_ref.offrng[1];
364 }
365
366 if (phi_ref.sizrng[0] < 0)
367 {
368 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
369 (perhaps because they have all been already visited by prior
370 recursive calls). */
371 psnlim->leave_phi (ref);
372 return NULL_TREE;
373 }
374
375 if (!same_ref.ref && same_ref.offrng[0] != 0)
376 /* Clear BASE0 if not all the arguments refer to the same object and
377 if not all their offsets are zero-based. This allows the final
378 PHI offset to out of bounds for some arguments but not for others
379 (or negative even of all the arguments are BASE0), which is overly
380 permissive. */
381 phi_ref.base0 = false;
382
383 if (same_ref.ref)
384 phi_ref = same_ref;
385 else
386 {
387 /* Replace the lower bound of the largest argument with the size
388 of the smallest argument, and set PARMARRAY if any argument
389 was one. */
390 phi_ref.sizrng[0] = minsize;
391 phi_ref.parmarray = parmarray;
392 }
393
394 /* Avoid changing *THIS. */
395 if (pref && pref != this)
396 *pref = phi_ref;
397
398 psnlim->leave_phi (ref);
399
400 return phi_ref.ref;
401}
402
83685efd
MS
403/* Return the maximum amount of space remaining and if non-null, set
404 argument to the minimum. */
405
406offset_int
407access_ref::size_remaining (offset_int *pmin /* = NULL */) const
408{
409 offset_int minbuf;
410 if (!pmin)
411 pmin = &minbuf;
412
413 /* add_offset() ensures the offset range isn't inverted. */
414 gcc_checking_assert (offrng[0] <= offrng[1]);
415
416 if (base0)
417 {
418 /* The offset into referenced object is zero-based (i.e., it's
419 not referenced by a pointer into middle of some unknown object). */
420 if (offrng[0] < 0 && offrng[1] < 0)
421 {
422 /* If the offset is negative the remaining size is zero. */
423 *pmin = 0;
424 return 0;
425 }
426
427 if (sizrng[1] <= offrng[0])
428 {
429 /* If the starting offset is greater than or equal to the upper
430 bound on the size of the object, the space remaining is zero.
431 As a special case, if it's equal, set *PMIN to -1 to let
432 the caller know the offset is valid and just past the end. */
433 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
434 return 0;
435 }
436
437 /* Otherwise return the size minus the lower bound of the offset. */
438 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
439
440 *pmin = sizrng[0] - or0;
441 return sizrng[1] - or0;
442 }
443
444 /* The offset to the referenced object isn't zero-based (i.e., it may
445 refer to a byte other than the first. The size of such an object
446 is constrained only by the size of the address space (the result
447 of max_object_size()). */
448 if (sizrng[1] <= offrng[0])
449 {
450 *pmin = 0;
451 return 0;
452 }
453
454 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
455
456 *pmin = sizrng[0] - or0;
457 return sizrng[1] - or0;
458}
459
460/* Add the range [MIN, MAX] to the offset range. For known objects (with
461 zero-based offsets) at least one of whose offset's bounds is in range,
462 constrain the other (or both) to the bounds of the object (i.e., zero
463 and the upper bound of its size). This improves the quality of
464 diagnostics. */
465
466void access_ref::add_offset (const offset_int &min, const offset_int &max)
467{
468 if (min <= max)
469 {
470 /* To add an ordinary range just add it to the bounds. */
471 offrng[0] += min;
472 offrng[1] += max;
473 }
474 else if (!base0)
475 {
476 /* To add an inverted range to an offset to an unknown object
477 expand it to the maximum. */
478 add_max_offset ();
479 return;
480 }
481 else
482 {
483 /* To add an inverted range to an offset to an known object set
484 the upper bound to the maximum representable offset value
485 (which may be greater than MAX_OBJECT_SIZE).
486 The lower bound is either the sum of the current offset and
487 MIN when abs(MAX) is greater than the former, or zero otherwise.
488 Zero because then then inverted range includes the negative of
489 the lower bound. */
490 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
491 offrng[1] = maxoff;
492
493 if (max >= 0)
494 {
495 offrng[0] = 0;
496 return;
497 }
498
83685efd
MS
499 offset_int absmax = wi::abs (max);
500 if (offrng[0] < absmax)
bdf6524b
MS
501 {
502 offrng[0] += min;
503 /* Cap the lower bound at the upper (set to MAXOFF above)
504 to avoid inadvertently recreating an inverted range. */
505 if (offrng[1] < offrng[0])
506 offrng[0] = offrng[1];
507 }
83685efd
MS
508 else
509 offrng[0] = 0;
510 }
511
512 if (!base0)
513 return;
514
515 /* When referencing a known object check to see if the offset computed
516 so far is in bounds... */
517 offset_int remrng[2];
518 remrng[1] = size_remaining (remrng);
519 if (remrng[1] > 0 || remrng[0] < 0)
520 {
521 /* ...if so, constrain it so that neither bound exceeds the size of
522 the object. Out of bounds offsets are left unchanged, and, for
523 better or worse, become in bounds later. They should be detected
524 and diagnosed at the point they first become invalid by
525 -Warray-bounds. */
526 if (offrng[0] < 0)
527 offrng[0] = 0;
528 if (offrng[1] > sizrng[1])
529 offrng[1] = sizrng[1];
530 }
531}
532
eafe8ee7
MS
533/* Set a bit for the PHI in VISITED and return true if it wasn't
534 already set. */
535
536bool
537ssa_name_limit_t::visit_phi (tree ssa_name)
538{
539 if (!visited)
540 visited = BITMAP_ALLOC (NULL);
541
542 /* Return false if SSA_NAME has already been visited. */
543 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
544}
545
546/* Clear a bit for the PHI in VISITED. */
547
548void
549ssa_name_limit_t::leave_phi (tree ssa_name)
550{
551 /* Return false if SSA_NAME has already been visited. */
552 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
553}
554
555/* Return false if the SSA_NAME chain length counter has reached
556 the limit, otherwise increment the counter and return true. */
557
558bool
559ssa_name_limit_t::next ()
560{
561 /* Return a negative value to let caller avoid recursing beyond
562 the specified limit. */
563 if (ssa_def_max == 0)
564 return false;
565
566 --ssa_def_max;
eafe8ee7
MS
567 return true;
568}
569
570/* If the SSA_NAME has already been "seen" return a positive value.
571 Otherwise add it to VISITED. If the SSA_NAME limit has been
572 reached, return a negative value. Otherwise return zero. */
573
574int
575ssa_name_limit_t::next_phi (tree ssa_name)
576{
577 {
578 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
579 /* Return a positive value if the PHI has already been visited. */
580 if (gimple_code (def_stmt) == GIMPLE_PHI
581 && !visit_phi (ssa_name))
582 return 1;
583 }
584
585 /* Return a negative value to let caller avoid recursing beyond
586 the specified limit. */
587 if (ssa_def_max == 0)
588 return -1;
589
590 --ssa_def_max;
591
592 return 0;
593}
594
595ssa_name_limit_t::~ssa_name_limit_t ()
596{
597 if (visited)
598 BITMAP_FREE (visited);
599}
600
d02c41dd
MS
601/* Default ctor. Initialize object with pointers to the range_query
602 and cache_type instances to use or null. */
603
604pointer_query::pointer_query (range_query *qry /* = NULL */,
605 cache_type *cache /* = NULL */)
606: rvals (qry), var_cache (cache), hits (), misses (),
607 failures (), depth (), max_depth ()
608{
609 /* No op. */
610}
611
612/* Return a pointer to the cached access_ref instance for the SSA_NAME
613 PTR if it's there or null otherwise. */
614
615const access_ref *
616pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
617{
618 if (!var_cache)
619 {
620 ++misses;
621 return NULL;
622 }
623
624 unsigned version = SSA_NAME_VERSION (ptr);
625 unsigned idx = version << 1 | (ostype & 1);
626 if (var_cache->indices.length () <= idx)
627 {
628 ++misses;
629 return NULL;
630 }
631
632 unsigned cache_idx = var_cache->indices[idx];
633 if (var_cache->access_refs.length () <= cache_idx)
634 {
635 ++misses;
636 return NULL;
637 }
638
639 access_ref &cache_ref = var_cache->access_refs[cache_idx];
640 if (cache_ref.ref)
641 {
642 ++hits;
643 return &cache_ref;
644 }
645
646 ++misses;
647 return NULL;
648}
649
650/* Retrieve the access_ref instance for a variable from the cache if it's
651 there or compute it and insert it into the cache if it's nonnonull. */
652
653bool
654pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
655{
656 const unsigned version
657 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
658
659 if (var_cache && version)
660 {
661 unsigned idx = version << 1 | (ostype & 1);
662 if (idx < var_cache->indices.length ())
663 {
664 unsigned cache_idx = var_cache->indices[idx] - 1;
665 if (cache_idx < var_cache->access_refs.length ()
666 && var_cache->access_refs[cache_idx].ref)
667 {
668 ++hits;
669 *pref = var_cache->access_refs[cache_idx];
670 return true;
671 }
672 }
673
674 ++misses;
675 }
676
677 if (!compute_objsize (ptr, ostype, pref, this))
678 {
679 ++failures;
680 return false;
681 }
682
683 return true;
684}
685
686/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
687 nonnull. */
688
689void
690pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
691{
692 /* Only add populated/valid entries. */
693 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
694 return;
695
696 /* Add REF to the two-level cache. */
697 unsigned version = SSA_NAME_VERSION (ptr);
698 unsigned idx = version << 1 | (ostype & 1);
699
700 /* Grow INDICES if necessary. An index is valid if it's nonzero.
701 Its value minus one is the index into ACCESS_REFS. Not all
702 entries are valid. */
703 if (var_cache->indices.length () <= idx)
704 var_cache->indices.safe_grow_cleared (idx + 1);
705
706 if (!var_cache->indices[idx])
707 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
708
709 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
710 REF member is nonnull. All entries except for the last two
711 are valid. Once nonnull, the REF value must stay unchanged. */
712 unsigned cache_idx = var_cache->indices[idx];
713 if (var_cache->access_refs.length () <= cache_idx)
714 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
715
716 access_ref cache_ref = var_cache->access_refs[cache_idx - 1];
717 if (cache_ref.ref)
718 {
719 gcc_checking_assert (cache_ref.ref == ref.ref);
720 return;
721 }
722
723 cache_ref = ref;
724}
725
726/* Flush the cache if it's nonnull. */
727
728void
729pointer_query::flush_cache ()
730{
731 if (!var_cache)
732 return;
733 var_cache->indices.release ();
734 var_cache->access_refs.release ();
735}
736
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  /* Reserved prefixes recognized for builtin expansion.  */
  static const char *const prefixes[] =
    { "__builtin_", "__sync_", "__atomic_" };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; ++i)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;

  return false;
}
6de9cd9a 750
bbf7ce11
RAE
751/* Return true if NODE should be considered for inline expansion regardless
752 of the optimization level. This means whenever a function is invoked with
753 its "internal" name, which normally contains the prefix "__builtin". */
754
4cfe7a6c 755bool
bbf7ce11
RAE
756called_as_built_in (tree node)
757{
758 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
759 we want the name used to call the function, not the name it
760 will have. */
761 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
762 return is_builtin_name (name);
763}
764
644ffefd
MJ
765/* Compute values M and N such that M divides (address of EXP - N) and such
766 that N < M. If these numbers can be determined, store M in alignp and N in
767 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
768 *alignp and any bit-offset to *bitposp.
73f6eabc
RS
769
770 Note that the address (and thus the alignment) computed here is based
771 on the address to which a symbol resolves, whereas DECL_ALIGN is based
772 on the address at which an object is actually located. These two
773 addresses are not always the same. For example, on ARM targets,
774 the address &foo of a Thumb function foo() has the lowest bit set,
b0f4a35f 775 whereas foo() itself starts on an even address.
df96b059 776
b0f4a35f
RG
777 If ADDR_P is true we are taking the address of the memory reference EXP
778 and thus cannot rely on the access taking place. */
779
780static bool
781get_object_alignment_2 (tree exp, unsigned int *alignp,
782 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
df96b059 783{
7df9b6f1 784 poly_int64 bitsize, bitpos;
e80c2726 785 tree offset;
ef4bddc2 786 machine_mode mode;
ee45a32d 787 int unsignedp, reversep, volatilep;
eae76e53 788 unsigned int align = BITS_PER_UNIT;
644ffefd 789 bool known_alignment = false;
df96b059 790
e80c2726
RG
791 /* Get the innermost object and the constant (bitpos) and possibly
792 variable (offset) offset of the access. */
ee45a32d 793 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
25b75a48 794 &unsignedp, &reversep, &volatilep);
e80c2726
RG
795
796 /* Extract alignment information from the innermost object and
797 possibly adjust bitpos and offset. */
b0f4a35f 798 if (TREE_CODE (exp) == FUNCTION_DECL)
73f6eabc 799 {
b0f4a35f
RG
800 /* Function addresses can encode extra information besides their
801 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
802 allows the low bit to be used as a virtual bit, we know
803 that the address itself must be at least 2-byte aligned. */
804 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
805 align = 2 * BITS_PER_UNIT;
73f6eabc 806 }
b0f4a35f
RG
807 else if (TREE_CODE (exp) == LABEL_DECL)
808 ;
809 else if (TREE_CODE (exp) == CONST_DECL)
e80c2726 810 {
b0f4a35f
RG
811 /* The alignment of a CONST_DECL is determined by its initializer. */
812 exp = DECL_INITIAL (exp);
e80c2726 813 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 814 if (CONSTANT_CLASS_P (exp))
58e17cf8 815 align = targetm.constant_alignment (exp, align);
6b00e42d 816
b0f4a35f 817 known_alignment = true;
e80c2726 818 }
b0f4a35f 819 else if (DECL_P (exp))
644ffefd 820 {
b0f4a35f 821 align = DECL_ALIGN (exp);
644ffefd 822 known_alignment = true;
644ffefd 823 }
b0f4a35f
RG
824 else if (TREE_CODE (exp) == INDIRECT_REF
825 || TREE_CODE (exp) == MEM_REF
826 || TREE_CODE (exp) == TARGET_MEM_REF)
e80c2726
RG
827 {
828 tree addr = TREE_OPERAND (exp, 0);
644ffefd
MJ
829 unsigned ptr_align;
830 unsigned HOST_WIDE_INT ptr_bitpos;
4ceae7e9 831 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
644ffefd 832
4ceae7e9 833 /* If the address is explicitely aligned, handle that. */
e80c2726
RG
834 if (TREE_CODE (addr) == BIT_AND_EXPR
835 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
836 {
4ceae7e9
RB
837 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
838 ptr_bitmask *= BITS_PER_UNIT;
146ec50f 839 align = least_bit_hwi (ptr_bitmask);
e80c2726
RG
840 addr = TREE_OPERAND (addr, 0);
841 }
644ffefd 842
b0f4a35f
RG
843 known_alignment
844 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
b0f4a35f
RG
845 align = MAX (ptr_align, align);
846
4ceae7e9
RB
847 /* Re-apply explicit alignment to the bitpos. */
848 ptr_bitpos &= ptr_bitmask;
849
3c82efd9
RG
850 /* The alignment of the pointer operand in a TARGET_MEM_REF
851 has to take the variable offset parts into account. */
b0f4a35f 852 if (TREE_CODE (exp) == TARGET_MEM_REF)
1be38ccb 853 {
b0f4a35f
RG
854 if (TMR_INDEX (exp))
855 {
856 unsigned HOST_WIDE_INT step = 1;
857 if (TMR_STEP (exp))
858 step = TREE_INT_CST_LOW (TMR_STEP (exp));
146ec50f 859 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
b0f4a35f
RG
860 }
861 if (TMR_INDEX2 (exp))
862 align = BITS_PER_UNIT;
863 known_alignment = false;
1be38ccb 864 }
644ffefd 865
b0f4a35f
RG
866 /* When EXP is an actual memory reference then we can use
867 TYPE_ALIGN of a pointer indirection to derive alignment.
868 Do so only if get_pointer_alignment_1 did not reveal absolute
3c82efd9
RG
869 alignment knowledge and if using that alignment would
870 improve the situation. */
a4cf4b64 871 unsigned int talign;
3c82efd9 872 if (!addr_p && !known_alignment
a4cf4b64
RB
873 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
874 && talign > align)
875 align = talign;
3c82efd9
RG
876 else
877 {
878 /* Else adjust bitpos accordingly. */
879 bitpos += ptr_bitpos;
880 if (TREE_CODE (exp) == MEM_REF
881 || TREE_CODE (exp) == TARGET_MEM_REF)
aca52e6f 882 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
3c82efd9 883 }
e80c2726 884 }
b0f4a35f 885 else if (TREE_CODE (exp) == STRING_CST)
1be38ccb 886 {
b0f4a35f
RG
887 /* STRING_CST are the only constant objects we allow to be not
888 wrapped inside a CONST_DECL. */
889 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 890 if (CONSTANT_CLASS_P (exp))
58e17cf8 891 align = targetm.constant_alignment (exp, align);
6b00e42d 892
b0f4a35f 893 known_alignment = true;
e80c2726 894 }
e80c2726
RG
895
896 /* If there is a non-constant offset part extract the maximum
897 alignment that can prevail. */
eae76e53 898 if (offset)
e80c2726 899 {
e75fde1a 900 unsigned int trailing_zeros = tree_ctz (offset);
eae76e53 901 if (trailing_zeros < HOST_BITS_PER_INT)
e80c2726 902 {
eae76e53
JJ
903 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
904 if (inner)
905 align = MIN (align, inner);
e80c2726 906 }
e80c2726
RG
907 }
908
7df9b6f1
RS
909 /* Account for the alignment of runtime coefficients, so that the constant
910 bitpos is guaranteed to be accurate. */
911 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
912 if (alt_align != 0 && alt_align < align)
913 {
914 align = alt_align;
915 known_alignment = false;
916 }
917
b0f4a35f 918 *alignp = align;
7df9b6f1 919 *bitposp = bitpos.coeffs[0] & (align - 1);
644ffefd 920 return known_alignment;
daade206
RG
921}
922
b0f4a35f
RG
923/* For a memory reference expression EXP compute values M and N such that M
924 divides (&EXP - N) and such that N < M. If these numbers can be determined,
925 store M in alignp and N in *BITPOSP and return true. Otherwise return false
926 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
927
928bool
929get_object_alignment_1 (tree exp, unsigned int *alignp,
930 unsigned HOST_WIDE_INT *bitposp)
931{
932 return get_object_alignment_2 (exp, alignp, bitposp, false);
933}
934
0eb77834 935/* Return the alignment in bits of EXP, an object. */
daade206
RG
936
937unsigned int
0eb77834 938get_object_alignment (tree exp)
daade206
RG
939{
940 unsigned HOST_WIDE_INT bitpos = 0;
941 unsigned int align;
942
644ffefd 943 get_object_alignment_1 (exp, &align, &bitpos);
daade206 944
e80c2726
RG
945 /* align and bitpos now specify known low bits of the pointer.
946 ptr & (align - 1) == bitpos. */
947
948 if (bitpos != 0)
146ec50f 949 align = least_bit_hwi (bitpos);
0eb77834 950 return align;
df96b059
JJ
951}
952
644ffefd
MJ
953/* For a pointer valued expression EXP compute values M and N such that M
954 divides (EXP - N) and such that N < M. If these numbers can be determined,
b0f4a35f
RG
955 store M in alignp and N in *BITPOSP and return true. Return false if
956 the results are just a conservative approximation.
28f4ec01 957
644ffefd 958 If EXP is not a pointer, false is returned too. */
28f4ec01 959
644ffefd
MJ
960bool
961get_pointer_alignment_1 (tree exp, unsigned int *alignp,
962 unsigned HOST_WIDE_INT *bitposp)
28f4ec01 963{
1be38ccb 964 STRIP_NOPS (exp);
6026b73e 965
1be38ccb 966 if (TREE_CODE (exp) == ADDR_EXPR)
b0f4a35f
RG
967 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
968 alignp, bitposp, true);
5fa79de8
RB
969 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
970 {
971 unsigned int align;
972 unsigned HOST_WIDE_INT bitpos;
973 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
974 &align, &bitpos);
975 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
976 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
977 else
978 {
979 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
980 if (trailing_zeros < HOST_BITS_PER_INT)
981 {
982 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
983 if (inner)
984 align = MIN (align, inner);
985 }
986 }
987 *alignp = align;
988 *bitposp = bitpos & (align - 1);
989 return res;
990 }
1be38ccb
RG
991 else if (TREE_CODE (exp) == SSA_NAME
992 && POINTER_TYPE_P (TREE_TYPE (exp)))
28f4ec01 993 {
644ffefd 994 unsigned int ptr_align, ptr_misalign;
1be38ccb 995 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
644ffefd
MJ
996
997 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
998 {
999 *bitposp = ptr_misalign * BITS_PER_UNIT;
1000 *alignp = ptr_align * BITS_PER_UNIT;
5505978a
RB
1001 /* Make sure to return a sensible alignment when the multiplication
1002 by BITS_PER_UNIT overflowed. */
1003 if (*alignp == 0)
1004 *alignp = 1u << (HOST_BITS_PER_INT - 1);
b0f4a35f 1005 /* We cannot really tell whether this result is an approximation. */
5f9a167b 1006 return false;
644ffefd
MJ
1007 }
1008 else
87c0fb4b
RG
1009 {
1010 *bitposp = 0;
644ffefd
MJ
1011 *alignp = BITS_PER_UNIT;
1012 return false;
87c0fb4b 1013 }
28f4ec01 1014 }
44fabee4
RG
1015 else if (TREE_CODE (exp) == INTEGER_CST)
1016 {
1017 *alignp = BIGGEST_ALIGNMENT;
1018 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
1019 & (BIGGEST_ALIGNMENT - 1));
1020 return true;
1021 }
1be38ccb 1022
87c0fb4b 1023 *bitposp = 0;
644ffefd
MJ
1024 *alignp = BITS_PER_UNIT;
1025 return false;
28f4ec01
BS
1026}
1027
87c0fb4b
RG
1028/* Return the alignment in bits of EXP, a pointer valued expression.
1029 The alignment returned is, by default, the alignment of the thing that
1030 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
1031
1032 Otherwise, look at the expression to see if we can do better, i.e., if the
1033 expression is actually pointing at an object whose alignment is tighter. */
1034
1035unsigned int
1036get_pointer_alignment (tree exp)
1037{
1038 unsigned HOST_WIDE_INT bitpos = 0;
1039 unsigned int align;
644ffefd
MJ
1040
1041 get_pointer_alignment_1 (exp, &align, &bitpos);
87c0fb4b
RG
1042
1043 /* align and bitpos now specify known low bits of the pointer.
1044 ptr & (align - 1) == bitpos. */
1045
1046 if (bitpos != 0)
146ec50f 1047 align = least_bit_hwi (bitpos);
87c0fb4b
RG
1048
1049 return align;
1050}
1051
bfb9bd47 1052/* Return the number of leading non-zero elements in the sequence
1eb4547b
MS
1053 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
1054 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
1055
bfb9bd47 1056unsigned
1eb4547b
MS
1057string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
1058{
1059 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1060
1061 unsigned n;
1062
1063 if (eltsize == 1)
1064 {
1065 /* Optimize the common case of plain char. */
1066 for (n = 0; n < maxelts; n++)
1067 {
1068 const char *elt = (const char*) ptr + n;
1069 if (!*elt)
1070 break;
1071 }
1072 }
1073 else
1074 {
1075 for (n = 0; n < maxelts; n++)
1076 {
1077 const char *elt = (const char*) ptr + n * eltsize;
1078 if (!memcmp (elt, "\0\0\0\0", eltsize))
1079 break;
1080 }
1081 }
1082 return n;
1083}
1084
d14c547a
MS
1085/* For a call EXPR at LOC to a function FNAME that expects a string
1086 in the argument ARG, issue a diagnostic due to it being a called
1087 with an argument that is a character array with no terminating
1088 NUL. SIZE is the EXACT size of the array, and BNDRNG the number
1089 of characters in which the NUL is expected. Either EXPR or FNAME
1090 may be null but noth both. SIZE may be null when BNDRNG is null. */
6ab24ea8
MS
1091
1092void
d14c547a
MS
1093warn_string_no_nul (location_t loc, tree expr, const char *fname,
1094 tree arg, tree decl, tree size /* = NULL_TREE */,
1095 bool exact /* = false */,
1096 const wide_int bndrng[2] /* = NULL */)
6ab24ea8 1097{
d14c547a 1098 if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
6ab24ea8
MS
1099 return;
1100
1101 loc = expansion_point_location_if_in_system_header (loc);
d14c547a
MS
1102 bool warned;
1103
1104 /* Format the bound range as a string to keep the nuber of messages
1105 from exploding. */
1106 char bndstr[80];
1107 *bndstr = 0;
1108 if (bndrng)
1109 {
1110 if (bndrng[0] == bndrng[1])
1111 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
1112 else
1113 sprintf (bndstr, "[%llu, %llu]",
1114 (unsigned long long) bndrng[0].to_uhwi (),
1115 (unsigned long long) bndrng[1].to_uhwi ());
1116 }
1117
1118 const tree maxobjsize = max_object_size ();
1119 const wide_int maxsiz = wi::to_wide (maxobjsize);
1120 if (expr)
1121 {
1122 tree func = get_callee_fndecl (expr);
1123 if (bndrng)
1124 {
1125 if (wi::ltu_p (maxsiz, bndrng[0]))
1126 warned = warning_at (loc, OPT_Wstringop_overread,
1127 "%K%qD specified bound %s exceeds "
1128 "maximum object size %E",
1129 expr, func, bndstr, maxobjsize);
1130 else
1131 {
1132 bool maybe = wi::to_wide (size) == bndrng[0];
1133 warned = warning_at (loc, OPT_Wstringop_overread,
1134 exact
1135 ? G_("%K%qD specified bound %s exceeds "
1136 "the size %E of unterminated array")
1137 : (maybe
1138 ? G_("%K%qD specified bound %s may "
1139 "exceed the size of at most %E "
1140 "of unterminated array")
1141 : G_("%K%qD specified bound %s exceeds "
1142 "the size of at most %E "
1143 "of unterminated array")),
1144 expr, func, bndstr, size);
1145 }
1146 }
1147 else
1148 warned = warning_at (loc, OPT_Wstringop_overread,
1149 "%K%qD argument missing terminating nul",
1150 expr, func);
1151 }
1152 else
1153 {
1154 if (bndrng)
1155 {
1156 if (wi::ltu_p (maxsiz, bndrng[0]))
1157 warned = warning_at (loc, OPT_Wstringop_overread,
1158 "%qs specified bound %s exceeds "
1159 "maximum object size %E",
1160 fname, bndstr, maxobjsize);
1161 else
1162 {
1163 bool maybe = wi::to_wide (size) == bndrng[0];
1164 warned = warning_at (loc, OPT_Wstringop_overread,
1165 exact
1166 ? G_("%qs specified bound %s exceeds "
1167 "the size %E of unterminated array")
1168 : (maybe
1169 ? G_("%qs specified bound %s may "
1170 "exceed the size of at most %E "
1171 "of unterminated array")
1172 : G_("%qs specified bound %s exceeds "
1173 "the size of at most %E "
1174 "of unterminated array")),
1175 fname, bndstr, size);
1176 }
1177 }
1178 else
1179 warned = warning_at (loc, OPT_Wstringop_overread,
1180 "%qsargument missing terminating nul",
1181 fname);
1182 }
6ab24ea8 1183
d14c547a 1184 if (warned)
6ab24ea8
MS
1185 {
1186 inform (DECL_SOURCE_LOCATION (decl),
1187 "referenced argument declared here");
1188 TREE_NO_WARNING (arg) = 1;
d14c547a
MS
1189 if (expr)
1190 TREE_NO_WARNING (expr) = 1;
6ab24ea8
MS
1191 }
1192}
1193
b5338fb3 1194/* For a call EXPR (which may be null) that expects a string argument
d14c547a
MS
1195 SRC as an argument, returns false if SRC is a character array with
1196 no terminating NUL. When nonnull, BOUND is the number of characters
1197 in which to expect the terminating NUL. RDONLY is true for read-only
1198 accesses such as strcmp, false for read-write such as strcpy. When
1199 EXPR is also issues a warning. */
b5338fb3
MS
1200
1201bool
d14c547a
MS
1202check_nul_terminated_array (tree expr, tree src,
1203 tree bound /* = NULL_TREE */)
b5338fb3 1204{
d14c547a
MS
1205 /* The constant size of the array SRC points to. The actual size
1206 may be less of EXACT is true, but not more. */
b5338fb3 1207 tree size;
d14c547a 1208 /* True if SRC involves a non-constant offset into the array. */
b5338fb3 1209 bool exact;
d14c547a 1210 /* The unterminated constant array SRC points to. */
b5338fb3
MS
1211 tree nonstr = unterminated_array (src, &size, &exact);
1212 if (!nonstr)
1213 return true;
1214
1215 /* NONSTR refers to the non-nul terminated constant array and SIZE
1216 is the constant size of the array in bytes. EXACT is true when
1217 SIZE is exact. */
1218
d14c547a 1219 wide_int bndrng[2];
b5338fb3
MS
1220 if (bound)
1221 {
b5338fb3 1222 if (TREE_CODE (bound) == INTEGER_CST)
d14c547a 1223 bndrng[0] = bndrng[1] = wi::to_wide (bound);
b5338fb3
MS
1224 else
1225 {
d14c547a 1226 value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
b5338fb3
MS
1227 if (rng != VR_RANGE)
1228 return true;
1229 }
1230
d14c547a
MS
1231 if (exact)
1232 {
1233 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
1234 return true;
1235 }
1236 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
b5338fb3
MS
1237 return true;
1238 }
1239
d14c547a
MS
1240 if (expr)
1241 warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
1242 size, exact, bound ? bndrng : NULL);
b5338fb3
MS
1243
1244 return false;
1245}
1246
e08341bb
MS
1247/* If EXP refers to an unterminated constant character array return
1248 the declaration of the object of which the array is a member or
6c4aa5f6
MS
1249 element and if SIZE is not null, set *SIZE to the size of
1250 the unterminated array and set *EXACT if the size is exact or
1251 clear it otherwise. Otherwise return null. */
e08341bb 1252
01b0acb7 1253tree
6c4aa5f6 1254unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
e08341bb 1255{
6c4aa5f6
MS
1256 /* C_STRLEN will return NULL and set DECL in the info
1257 structure if EXP references a unterminated array. */
e09aa5bd
MS
1258 c_strlen_data lendata = { };
1259 tree len = c_strlen (exp, 1, &lendata);
b71bbbe2 1260 if (len == NULL_TREE && lendata.minlen && lendata.decl)
6c4aa5f6
MS
1261 {
1262 if (size)
1263 {
b71bbbe2 1264 len = lendata.minlen;
e09aa5bd 1265 if (lendata.off)
6c4aa5f6 1266 {
e09aa5bd
MS
1267 /* Constant offsets are already accounted for in LENDATA.MINLEN,
1268 but not in a SSA_NAME + CST expression. */
1269 if (TREE_CODE (lendata.off) == INTEGER_CST)
6c4aa5f6 1270 *exact = true;
e09aa5bd
MS
1271 else if (TREE_CODE (lendata.off) == PLUS_EXPR
1272 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
6c4aa5f6
MS
1273 {
1274 /* Subtract the offset from the size of the array. */
1275 *exact = false;
e09aa5bd 1276 tree temp = TREE_OPERAND (lendata.off, 1);
6c4aa5f6
MS
1277 temp = fold_convert (ssizetype, temp);
1278 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
1279 }
1280 else
1281 *exact = false;
1282 }
1283 else
1284 *exact = true;
1285
1286 *size = len;
1287 }
e09aa5bd 1288 return lendata.decl;
6c4aa5f6
MS
1289 }
1290
1291 return NULL_TREE;
e08341bb
MS
1292}
1293
1eb4547b
MS
1294/* Compute the length of a null-terminated character string or wide
1295 character string handling character sizes of 1, 2, and 4 bytes.
1296 TREE_STRING_LENGTH is not the right way because it evaluates to
1297 the size of the character array in bytes (as opposed to characters)
1298 and because it can contain a zero byte in the middle.
28f4ec01 1299
f1ba665b 1300 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 1301 into the instruction stream and zero if it is going to be expanded.
f1ba665b 1302 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627 1303 is returned, otherwise NULL, since
14b7950f 1304 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
ae808627
JJ
1305 evaluate the side-effects.
1306
21e8fb22
RB
1307 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
1308 accesses. Note that this implies the result is not going to be emitted
1309 into the instruction stream.
1310
7d583f42 1311 Additional information about the string accessed may be recorded
14b7950f 1312 in DATA. For example, if ARG references an unterminated string,
7d583f42
JL
1313 then the declaration will be stored in the DECL field. If the
1314 length of the unterminated string can be determined, it'll be
1315 stored in the LEN field. Note this length could well be different
1316 than what a C strlen call would return.
6ab24ea8 1317
4148b00d
BE
1318 ELTSIZE is 1 for normal single byte character strings, and 2 or
1319 4 for wide characer strings. ELTSIZE is by default 1.
fed3cef0 1320
4148b00d 1321 The value returned is of type `ssizetype'. */
28f4ec01 1322
6de9cd9a 1323tree
14b7950f 1324c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
28f4ec01 1325{
7d583f42
JL
1326 /* If we were not passed a DATA pointer, then get one to a local
1327 structure. That avoids having to check DATA for NULL before
1328 each time we want to use it. */
3f46ef1f 1329 c_strlen_data local_strlen_data = { };
7d583f42
JL
1330 if (!data)
1331 data = &local_strlen_data;
1332
1ebf0641 1333 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
14b7950f
MS
1334
1335 tree src = STRIP_NOPS (arg);
ae808627
JJ
1336 if (TREE_CODE (src) == COND_EXPR
1337 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1338 {
1339 tree len1, len2;
1340
7d583f42
JL
1341 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1342 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
33521f7d 1343 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
1344 return len1;
1345 }
1346
1347 if (TREE_CODE (src) == COMPOUND_EXPR
1348 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
7d583f42 1349 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
ae808627 1350
1eb4547b 1351 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 1352
1eb4547b
MS
1353 /* Offset from the beginning of the string in bytes. */
1354 tree byteoff;
4148b00d 1355 tree memsize;
6ab24ea8
MS
1356 tree decl;
1357 src = string_constant (src, &byteoff, &memsize, &decl);
28f4ec01 1358 if (src == 0)
5039610b 1359 return NULL_TREE;
fed3cef0 1360
1eb4547b 1361 /* Determine the size of the string element. */
4148b00d
BE
1362 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
1363 return NULL_TREE;
1eb4547b
MS
1364
1365 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
35b4d3a6 1366 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
bfb9bd47
MS
1367 in case the latter is less than the size of the array, such as when
1368 SRC refers to a short string literal used to initialize a large array.
1369 In that case, the elements of the array after the terminating NUL are
1370 all NUL. */
1371 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
d01b568a 1372 strelts = strelts / eltsize;
bfb9bd47 1373
4148b00d
BE
1374 if (!tree_fits_uhwi_p (memsize))
1375 return NULL_TREE;
1376
d01b568a 1377 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1eb4547b
MS
1378
1379 /* PTR can point to the byte representation of any string type, including
1380 char* and wchar_t*. */
1381 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 1382
1eb4547b 1383 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01 1384 {
4148b00d
BE
1385 /* The code below works only for single byte character types. */
1386 if (eltsize != 1)
1387 return NULL_TREE;
1388
bfb9bd47
MS
1389 /* If the string has an internal NUL character followed by any
1390 non-NUL characters (e.g., "foo\0bar"), we can't compute
1391 the offset to the following NUL if we don't know where to
28f4ec01 1392 start searching for it. */
bfb9bd47 1393 unsigned len = string_length (ptr, eltsize, strelts);
fed3cef0 1394
7d583f42
JL
1395 /* Return when an embedded null character is found or none at all.
1396 In the latter case, set the DECL/LEN field in the DATA structure
1397 so that callers may examine them. */
6ab24ea8 1398 if (len + 1 < strelts)
4148b00d 1399 return NULL_TREE;
6ab24ea8
MS
1400 else if (len >= maxelts)
1401 {
7d583f42 1402 data->decl = decl;
6c4aa5f6 1403 data->off = byteoff;
b71bbbe2 1404 data->minlen = ssize_int (len);
6ab24ea8
MS
1405 return NULL_TREE;
1406 }
c42d0aa0 1407
d01b568a
BE
1408 /* For empty strings the result should be zero. */
1409 if (len == 0)
1410 return ssize_int (0);
1411
28f4ec01 1412 /* We don't know the starting offset, but we do know that the string
bfb9bd47
MS
1413 has no internal zero bytes. If the offset falls within the bounds
1414 of the string subtract the offset from the length of the string,
1415 and return that. Otherwise the length is zero. Take care to
1416 use SAVE_EXPR in case the OFFSET has side-effects. */
e8bf3d5e
BE
1417 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
1418 : byteoff;
1419 offsave = fold_convert_loc (loc, sizetype, offsave);
bfb9bd47 1420 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
e8bf3d5e
BE
1421 size_int (len));
1422 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
1423 offsave);
1424 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
bfb9bd47
MS
1425 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
1426 build_zero_cst (ssizetype));
28f4ec01
BS
1427 }
1428
1eb4547b
MS
1429 /* Offset from the beginning of the string in elements. */
1430 HOST_WIDE_INT eltoff;
1431
28f4ec01 1432 /* We have a known offset into the string. Start searching there for
5197bd50 1433 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
1434 if (byteoff == 0)
1435 eltoff = 0;
1ebf0641 1436 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1eb4547b 1437 eltoff = -1;
28f4ec01 1438 else
1ebf0641 1439 eltoff = tree_to_uhwi (byteoff) / eltsize;
fed3cef0 1440
b2ed71b6
BE
1441 /* If the offset is known to be out of bounds, warn, and call strlen at
1442 runtime. */
d01b568a 1443 if (eltoff < 0 || eltoff >= maxelts)
28f4ec01 1444 {
1db01ff9 1445 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81 1446 if (only_value != 2
14b7950f 1447 && !TREE_NO_WARNING (arg)
1db01ff9
JJ
1448 && warning_at (loc, OPT_Warray_bounds,
1449 "offset %qwi outside bounds of constant string",
1450 eltoff))
14b7950f
MS
1451 {
1452 if (decl)
1453 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
1454 TREE_NO_WARNING (arg) = 1;
1455 }
5039610b 1456 return NULL_TREE;
28f4ec01 1457 }
fed3cef0 1458
4148b00d
BE
1459 /* If eltoff is larger than strelts but less than maxelts the
1460 string length is zero, since the excess memory will be zero. */
1461 if (eltoff > strelts)
1462 return ssize_int (0);
1463
28f4ec01
BS
1464 /* Use strlen to search for the first zero byte. Since any strings
1465 constructed with build_string will have nulls appended, we win even
1466 if we get handed something like (char[4])"abcd".
1467
1eb4547b 1468 Since ELTOFF is our starting index into the string, no further
28f4ec01 1469 calculation is needed. */
1eb4547b 1470 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
4148b00d 1471 strelts - eltoff);
1eb4547b 1472
d01b568a 1473 /* Don't know what to return if there was no zero termination.
7d583f42
JL
1474 Ideally this would turn into a gcc_checking_assert over time.
1475 Set DECL/LEN so callers can examine them. */
d01b568a 1476 if (len >= maxelts - eltoff)
6ab24ea8 1477 {
7d583f42 1478 data->decl = decl;
6c4aa5f6 1479 data->off = byteoff;
b71bbbe2 1480 data->minlen = ssize_int (len);
6ab24ea8
MS
1481 return NULL_TREE;
1482 }
1ebf0641 1483
1eb4547b 1484 return ssize_int (len);
28f4ec01
BS
1485}
1486
807e902e 1487/* Return a constant integer corresponding to target reading
3140b2ed
JJ
1488 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1489 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1490 are assumed to be zero, otherwise it reads as many characters
1491 as needed. */
1492
1493rtx
1494c_readstr (const char *str, scalar_int_mode mode,
1495 bool null_terminated_p/*=true*/)
57814e5e 1496{
57814e5e
JJ
1497 HOST_WIDE_INT ch;
1498 unsigned int i, j;
807e902e 1499 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
57814e5e 1500
298e6adc 1501 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
807e902e
KZ
1502 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
1503 / HOST_BITS_PER_WIDE_INT;
1504
1505 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
1506 for (i = 0; i < len; i++)
1507 tmp[i] = 0;
5906d013 1508
57814e5e
JJ
1509 ch = 1;
1510 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1511 {
1512 j = i;
1513 if (WORDS_BIG_ENDIAN)
1514 j = GET_MODE_SIZE (mode) - i - 1;
1515 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
e046112d 1516 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
57814e5e
JJ
1517 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
1518 j *= BITS_PER_UNIT;
5906d013 1519
3140b2ed 1520 if (ch || !null_terminated_p)
57814e5e 1521 ch = (unsigned char) str[i];
807e902e 1522 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
57814e5e 1523 }
807e902e
KZ
1524
1525 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1526 return immed_wide_int_const (c, mode);
57814e5e
JJ
1527}
1528
ab937357 1529/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 1530 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
1531 P. */
1532
1533static int
4682ae04 1534target_char_cast (tree cst, char *p)
ab937357
JJ
1535{
1536 unsigned HOST_WIDE_INT val, hostval;
1537
de77ab75 1538 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
1539 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1540 return 1;
1541
807e902e 1542 /* Do not care if it fits or not right here. */
de77ab75 1543 val = TREE_INT_CST_LOW (cst);
807e902e 1544
ab937357 1545 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 1546 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
1547
1548 hostval = val;
1549 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 1550 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
1551
1552 if (val != hostval)
1553 return 1;
1554
1555 *p = hostval;
1556 return 0;
1557}
1558
6de9cd9a
DN
1559/* Similar to save_expr, but assumes that arbitrary code is not executed
1560 in between the multiple evaluations. In particular, we assume that a
1561 non-addressable local variable will not be modified. */
1562
1563static tree
1564builtin_save_expr (tree exp)
1565{
5cbf5c20
RG
1566 if (TREE_CODE (exp) == SSA_NAME
1567 || (TREE_ADDRESSABLE (exp) == 0
1568 && (TREE_CODE (exp) == PARM_DECL
8813a647 1569 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
1570 return exp;
1571
1572 return save_expr (exp);
1573}
1574
28f4ec01
BS
1575/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1576 times to get the address of either a higher stack frame, or a return
1577 address located within it (depending on FNDECL_CODE). */
fed3cef0 1578
54e62799 1579static rtx
c6d01079 1580expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
28f4ec01
BS
1581{
1582 int i;
c6d01079 1583 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
579f2946 1584 if (tem == NULL_RTX)
c8f27794 1585 {
579f2946
TS
1586 /* For a zero count with __builtin_return_address, we don't care what
1587 frame address we return, because target-specific definitions will
1588 override us. Therefore frame pointer elimination is OK, and using
1589 the soft frame pointer is OK.
1590
1591 For a nonzero count, or a zero count with __builtin_frame_address,
1592 we require a stable offset from the current frame pointer to the
1593 previous one, so we must use the hard frame pointer, and
1594 we must disable frame pointer elimination. */
1595 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1596 tem = frame_pointer_rtx;
1597 else
1598 {
1599 tem = hard_frame_pointer_rtx;
c8f27794 1600
579f2946
TS
1601 /* Tell reload not to eliminate the frame pointer. */
1602 crtl->accesses_prior_frames = 1;
1603 }
c8f27794 1604 }
c6d01079 1605
28f4ec01
BS
1606 if (count > 0)
1607 SETUP_FRAME_ADDRESSES ();
28f4ec01 1608
224869d9 1609 /* On the SPARC, the return address is not in the frame, it is in a
28f4ec01
BS
1610 register. There is no way to access it off of the current frame
1611 pointer, but it can be accessed off the previous frame pointer by
1612 reading the value from the register window save area. */
2e612c47 1613 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
28f4ec01 1614 count--;
28f4ec01
BS
1615
1616 /* Scan back COUNT frames to the specified frame. */
1617 for (i = 0; i < count; i++)
1618 {
1619 /* Assume the dynamic chain pointer is in the word that the
1620 frame address points to, unless otherwise specified. */
28f4ec01 1621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
28f4ec01 1622 tem = memory_address (Pmode, tem);
bf877a76 1623 tem = gen_frame_mem (Pmode, tem);
432fd734 1624 tem = copy_to_reg (tem);
28f4ec01
BS
1625 }
1626
224869d9
EB
1627 /* For __builtin_frame_address, return what we've got. But, on
1628 the SPARC for example, we may have to add a bias. */
28f4ec01 1629 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
224869d9 1630 return FRAME_ADDR_RTX (tem);
28f4ec01 1631
224869d9 1632 /* For __builtin_return_address, get the return address from that frame. */
28f4ec01
BS
1633#ifdef RETURN_ADDR_RTX
1634 tem = RETURN_ADDR_RTX (count, tem);
1635#else
1636 tem = memory_address (Pmode,
0a81f074 1637 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
bf877a76 1638 tem = gen_frame_mem (Pmode, tem);
28f4ec01
BS
1639#endif
1640 return tem;
1641}
1642
3bdf5ad1 1643/* Alias set used for setjmp buffer. */
4862826d 1644static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 1645
250d07b6 1646/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
1647 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1648 exception handling code. */
28f4ec01 1649
250d07b6 1650void
4682ae04 1651expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 1652{
ef4bddc2 1653 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1654 rtx stack_save;
3bdf5ad1 1655 rtx mem;
28f4ec01 1656
3bdf5ad1
RK
1657 if (setjmp_alias_set == -1)
1658 setjmp_alias_set = new_alias_set ();
1659
5ae6cd0d 1660 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 1661
7d505b82 1662 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 1663
250d07b6
RH
1664 /* We store the frame pointer and the address of receiver_label in
1665 the buffer and use the rest of it for the stack save area, which
1666 is machine-dependent. */
28f4ec01 1667
3bdf5ad1 1668 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 1669 set_mem_alias_set (mem, setjmp_alias_set);
25403c41 1670 emit_move_insn (mem, hard_frame_pointer_rtx);
3bdf5ad1 1671
0a81f074
RS
1672 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1673 GET_MODE_SIZE (Pmode))),
ba4828e0 1674 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
1675
1676 emit_move_insn (validize_mem (mem),
250d07b6 1677 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
1678
1679 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 1680 plus_constant (Pmode, buf_addr,
28f4ec01 1681 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 1682 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 1683 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
1684
1685 /* If there is further processing to do, do it. */
95a3fb9d
RS
1686 if (targetm.have_builtin_setjmp_setup ())
1687 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 1688
ecaebb9e 1689 /* We have a nonlocal label. */
e3b5732b 1690 cfun->has_nonlocal_label = 1;
250d07b6 1691}
28f4ec01 1692
4f6c2131 1693/* Construct the trailing part of a __builtin_setjmp call. This is
e90d1568
HPN
1694 also called directly by the SJLJ exception handling code.
1695 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
250d07b6
RH
1696
1697void
95a3fb9d 1698expand_builtin_setjmp_receiver (rtx receiver_label)
250d07b6 1699{
531ca746
RH
1700 rtx chain;
1701
e90d1568 1702 /* Mark the FP as used when we get here, so we have to make sure it's
28f4ec01 1703 marked as used by this function. */
c41c1387 1704 emit_use (hard_frame_pointer_rtx);
28f4ec01
BS
1705
1706 /* Mark the static chain as clobbered here so life information
1707 doesn't get messed up for it. */
4b522b8f 1708 chain = rtx_for_static_chain (current_function_decl, true);
531ca746
RH
1709 if (chain && REG_P (chain))
1710 emit_clobber (chain);
28f4ec01 1711
38b0b093 1712 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
28f4ec01 1713 {
e90d1568
HPN
1714 /* If the argument pointer can be eliminated in favor of the
1715 frame pointer, we don't need to restore it. We assume here
1716 that if such an elimination is present, it can always be used.
1717 This is the case on all known machines; if we don't make this
1718 assumption, we do unnecessary saving on many machines. */
28f4ec01 1719 size_t i;
8b60264b 1720 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
28f4ec01 1721
b6a1cbae 1722 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
28f4ec01
BS
1723 if (elim_regs[i].from == ARG_POINTER_REGNUM
1724 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1725 break;
1726
b6a1cbae 1727 if (i == ARRAY_SIZE (elim_regs))
28f4ec01
BS
1728 {
1729 /* Now restore our arg pointer from the address at which it
278ed218 1730 was saved in our stack frame. */
2e3f842f 1731 emit_move_insn (crtl->args.internal_arg_pointer,
bd60bab2 1732 copy_to_reg (get_arg_pointer_save_area ()));
28f4ec01
BS
1733 }
1734 }
28f4ec01 1735
95a3fb9d
RS
1736 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1737 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1738 else if (targetm.have_nonlocal_goto_receiver ())
1739 emit_insn (targetm.gen_nonlocal_goto_receiver ());
28f4ec01 1740 else
95a3fb9d 1741 { /* Nothing */ }
bcd7edfe 1742
6fb5fa3c
DB
1743 /* We must not allow the code we just generated to be reordered by
1744 scheduling. Specifically, the update of the frame pointer must
f1257268 1745 happen immediately, not later. */
6fb5fa3c 1746 emit_insn (gen_blockage ());
250d07b6 1747}
28f4ec01 1748
28f4ec01
BS
1749/* __builtin_longjmp is passed a pointer to an array of five words (not
1750 all will be used on all machines). It operates similarly to the C
1751 library function of the same name, but is more efficient. Much of
4f6c2131 1752 the code below is copied from the handling of non-local gotos. */
28f4ec01 1753
54e62799 1754static void
4682ae04 1755expand_builtin_longjmp (rtx buf_addr, rtx value)
28f4ec01 1756{
58f4cf2a
DM
1757 rtx fp, lab, stack;
1758 rtx_insn *insn, *last;
ef4bddc2 1759 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 1760
b8698a0f 1761 /* DRAP is needed for stack realign if longjmp is expanded to current
2e3f842f
L
1762 function */
1763 if (SUPPORTS_STACK_ALIGNMENT)
1764 crtl->need_drap = true;
1765
3bdf5ad1
RK
1766 if (setjmp_alias_set == -1)
1767 setjmp_alias_set = new_alias_set ();
1768
5ae6cd0d 1769 buf_addr = convert_memory_address (Pmode, buf_addr);
4b6c1672 1770
28f4ec01
BS
1771 buf_addr = force_reg (Pmode, buf_addr);
1772
531ca746
RH
1773 /* We require that the user must pass a second argument of 1, because
1774 that is what builtin_setjmp will return. */
298e6adc 1775 gcc_assert (value == const1_rtx);
28f4ec01 1776
d337d653 1777 last = get_last_insn ();
95a3fb9d
RS
1778 if (targetm.have_builtin_longjmp ())
1779 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
28f4ec01 1780 else
28f4ec01
BS
1781 {
1782 fp = gen_rtx_MEM (Pmode, buf_addr);
0a81f074 1783 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
28f4ec01
BS
1784 GET_MODE_SIZE (Pmode)));
1785
0a81f074 1786 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
28f4ec01 1787 2 * GET_MODE_SIZE (Pmode)));
ba4828e0
RK
1788 set_mem_alias_set (fp, setjmp_alias_set);
1789 set_mem_alias_set (lab, setjmp_alias_set);
1790 set_mem_alias_set (stack, setjmp_alias_set);
28f4ec01
BS
1791
1792 /* Pick up FP, label, and SP from the block and jump. This code is
1793 from expand_goto in stmt.c; see there for detailed comments. */
95a3fb9d 1794 if (targetm.have_nonlocal_goto ())
28f4ec01
BS
1795 /* We have to pass a value to the nonlocal_goto pattern that will
1796 get copied into the static_chain pointer, but it does not matter
1797 what that value is, because builtin_setjmp does not use it. */
95a3fb9d 1798 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
28f4ec01 1799 else
28f4ec01 1800 {
c41c1387
RS
1801 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1802 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
41439bf6 1803
511ed59d
WD
1804 lab = copy_to_reg (lab);
1805
71b14428
WD
1806 /* Restore the frame pointer and stack pointer. We must use a
1807 temporary since the setjmp buffer may be a local. */
1808 fp = copy_to_reg (fp);
9eac0f2a 1809 emit_stack_restore (SAVE_NONLOCAL, stack);
511ed59d
WD
1810
1811 /* Ensure the frame pointer move is not optimized. */
1812 emit_insn (gen_blockage ());
1813 emit_clobber (hard_frame_pointer_rtx);
1814 emit_clobber (frame_pointer_rtx);
71b14428 1815 emit_move_insn (hard_frame_pointer_rtx, fp);
28f4ec01 1816
c41c1387
RS
1817 emit_use (hard_frame_pointer_rtx);
1818 emit_use (stack_pointer_rtx);
28f4ec01
BS
1819 emit_indirect_jump (lab);
1820 }
1821 }
4b01bd16
RH
1822
1823 /* Search backwards and mark the jump insn as a non-local goto.
1824 Note that this precludes the use of __builtin_longjmp to a
1825 __builtin_setjmp target in the same function. However, we've
1826 already cautioned the user that these functions are for
1827 internal exception handling use only. */
8206fc89
AM
1828 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1829 {
298e6adc 1830 gcc_assert (insn != last);
5906d013 1831
4b4bf941 1832 if (JUMP_P (insn))
8206fc89 1833 {
65c5f2a6 1834 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
8206fc89
AM
1835 break;
1836 }
4b4bf941 1837 else if (CALL_P (insn))
ca7fd9cd 1838 break;
8206fc89 1839 }
28f4ec01
BS
1840}
1841
862d0b35
DN
1842static inline bool
1843more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1844{
1845 return (iter->i < iter->n);
1846}
1847
1848/* This function validates the types of a function call argument list
1849 against a specified list of tree_codes. If the last specifier is a 0,
474da67e 1850 that represents an ellipsis, otherwise the last specifier must be a
862d0b35
DN
1851 VOID_TYPE. */
1852
1853static bool
1854validate_arglist (const_tree callexpr, ...)
1855{
1856 enum tree_code code;
1857 bool res = 0;
1858 va_list ap;
1859 const_call_expr_arg_iterator iter;
1860 const_tree arg;
1861
1862 va_start (ap, callexpr);
1863 init_const_call_expr_arg_iterator (callexpr, &iter);
1864
474da67e 1865 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
0dba7960
JJ
1866 tree fn = CALL_EXPR_FN (callexpr);
1867 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
474da67e
MS
1868
1869 for (unsigned argno = 1; ; ++argno)
862d0b35
DN
1870 {
1871 code = (enum tree_code) va_arg (ap, int);
474da67e 1872
862d0b35
DN
1873 switch (code)
1874 {
1875 case 0:
1876 /* This signifies an ellipses, any further arguments are all ok. */
1877 res = true;
1878 goto end;
1879 case VOID_TYPE:
1880 /* This signifies an endlink, if no arguments remain, return
1881 true, otherwise return false. */
1882 res = !more_const_call_expr_args_p (&iter);
1883 goto end;
474da67e
MS
1884 case POINTER_TYPE:
1885 /* The actual argument must be nonnull when either the whole
1886 called function has been declared nonnull, or when the formal
1887 argument corresponding to the actual argument has been. */
0dba7960
JJ
1888 if (argmap
1889 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1890 {
1891 arg = next_const_call_expr_arg (&iter);
1892 if (!validate_arg (arg, code) || integer_zerop (arg))
1893 goto end;
1894 break;
1895 }
474da67e 1896 /* FALLTHRU */
862d0b35
DN
1897 default:
1898 /* If no parameters remain or the parameter's code does not
1899 match the specified code, return false. Otherwise continue
1900 checking any remaining arguments. */
1901 arg = next_const_call_expr_arg (&iter);
0dba7960 1902 if (!validate_arg (arg, code))
862d0b35
DN
1903 goto end;
1904 break;
1905 }
1906 }
862d0b35
DN
1907
1908 /* We need gotos here since we can only have one VA_CLOSE in a
1909 function. */
1910 end: ;
1911 va_end (ap);
1912
474da67e
MS
1913 BITMAP_FREE (argmap);
1914
862d0b35
DN
1915 return res;
1916}
1917
6de9cd9a
DN
1918/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1919 and the address of the save area. */
1920
1921static rtx
5039610b 1922expand_builtin_nonlocal_goto (tree exp)
6de9cd9a
DN
1923{
1924 tree t_label, t_save_area;
58f4cf2a
DM
1925 rtx r_label, r_save_area, r_fp, r_sp;
1926 rtx_insn *insn;
6de9cd9a 1927
5039610b 1928 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
1929 return NULL_RTX;
1930
5039610b
SL
1931 t_label = CALL_EXPR_ARG (exp, 0);
1932 t_save_area = CALL_EXPR_ARG (exp, 1);
6de9cd9a 1933
84217346 1934 r_label = expand_normal (t_label);
5e89a381 1935 r_label = convert_memory_address (Pmode, r_label);
84217346 1936 r_save_area = expand_normal (t_save_area);
5e89a381 1937 r_save_area = convert_memory_address (Pmode, r_save_area);
bc6d3f91
EB
1938 /* Copy the address of the save location to a register just in case it was
1939 based on the frame pointer. */
cba2d79f 1940 r_save_area = copy_to_reg (r_save_area);
6de9cd9a
DN
1941 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1942 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
0a81f074
RS
1943 plus_constant (Pmode, r_save_area,
1944 GET_MODE_SIZE (Pmode)));
6de9cd9a 1945
e3b5732b 1946 crtl->has_nonlocal_goto = 1;
6de9cd9a 1947
6de9cd9a 1948 /* ??? We no longer need to pass the static chain value, afaik. */
95a3fb9d
RS
1949 if (targetm.have_nonlocal_goto ())
1950 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
6de9cd9a 1951 else
6de9cd9a 1952 {
c41c1387
RS
1953 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1954 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
caf93cb0 1955
511ed59d
WD
1956 r_label = copy_to_reg (r_label);
1957
71b14428
WD
1958 /* Restore the frame pointer and stack pointer. We must use a
1959 temporary since the setjmp buffer may be a local. */
1960 r_fp = copy_to_reg (r_fp);
9eac0f2a 1961 emit_stack_restore (SAVE_NONLOCAL, r_sp);
511ed59d
WD
1962
1963 /* Ensure the frame pointer move is not optimized. */
1964 emit_insn (gen_blockage ());
1965 emit_clobber (hard_frame_pointer_rtx);
1966 emit_clobber (frame_pointer_rtx);
71b14428 1967 emit_move_insn (hard_frame_pointer_rtx, r_fp);
caf93cb0 1968
6de9cd9a
DN
1969 /* USE of hard_frame_pointer_rtx added for consistency;
1970 not clear if really needed. */
c41c1387
RS
1971 emit_use (hard_frame_pointer_rtx);
1972 emit_use (stack_pointer_rtx);
eae645b6
RS
1973
1974 /* If the architecture is using a GP register, we must
1975 conservatively assume that the target function makes use of it.
1976 The prologue of functions with nonlocal gotos must therefore
1977 initialize the GP register to the appropriate value, and we
1978 must then make sure that this value is live at the point
1979 of the jump. (Note that this doesn't necessarily apply
1980 to targets with a nonlocal_goto pattern; they are free
1981 to implement it in their own way. Note also that this is
1982 a no-op if the GP register is a global invariant.) */
959c1e20
AH
1983 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1984 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
c41c1387 1985 emit_use (pic_offset_table_rtx);
eae645b6 1986
6de9cd9a
DN
1987 emit_indirect_jump (r_label);
1988 }
caf93cb0 1989
6de9cd9a
DN
1990 /* Search backwards to the jump insn and mark it as a
1991 non-local goto. */
1992 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1993 {
4b4bf941 1994 if (JUMP_P (insn))
6de9cd9a 1995 {
65c5f2a6 1996 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
6de9cd9a
DN
1997 break;
1998 }
4b4bf941 1999 else if (CALL_P (insn))
6de9cd9a
DN
2000 break;
2001 }
2002
2003 return const0_rtx;
2004}
2005
2b92e7f5
RK
2006/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2007 (not all will be used on all machines) that was passed to __builtin_setjmp.
d33606c3
EB
2008 It updates the stack pointer in that block to the current value. This is
2009 also called directly by the SJLJ exception handling code. */
2b92e7f5 2010
d33606c3 2011void
2b92e7f5
RK
2012expand_builtin_update_setjmp_buf (rtx buf_addr)
2013{
ef4bddc2 2014 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
4887028b 2015 buf_addr = convert_memory_address (Pmode, buf_addr);
bc6d3f91 2016 rtx stack_save
2b92e7f5
RK
2017 = gen_rtx_MEM (sa_mode,
2018 memory_address
2019 (sa_mode,
0a81f074
RS
2020 plus_constant (Pmode, buf_addr,
2021 2 * GET_MODE_SIZE (Pmode))));
2b92e7f5 2022
9eac0f2a 2023 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2b92e7f5
RK
2024}
2025
a9ccbb60
JJ
2026/* Expand a call to __builtin_prefetch. For a target that does not support
2027 data prefetch, evaluate the memory address argument in case it has side
2028 effects. */
2029
2030static void
5039610b 2031expand_builtin_prefetch (tree exp)
a9ccbb60
JJ
2032{
2033 tree arg0, arg1, arg2;
5039610b 2034 int nargs;
a9ccbb60
JJ
2035 rtx op0, op1, op2;
2036
5039610b 2037 if (!validate_arglist (exp, POINTER_TYPE, 0))
e83d297b
JJ
2038 return;
2039
5039610b
SL
2040 arg0 = CALL_EXPR_ARG (exp, 0);
2041
e83d297b
JJ
2042 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2043 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2044 locality). */
5039610b
SL
2045 nargs = call_expr_nargs (exp);
2046 if (nargs > 1)
2047 arg1 = CALL_EXPR_ARG (exp, 1);
e83d297b 2048 else
5039610b
SL
2049 arg1 = integer_zero_node;
2050 if (nargs > 2)
2051 arg2 = CALL_EXPR_ARG (exp, 2);
2052 else
9a9d280e 2053 arg2 = integer_three_node;
a9ccbb60
JJ
2054
2055 /* Argument 0 is an address. */
2056 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2057
2058 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2059 if (TREE_CODE (arg1) != INTEGER_CST)
2060 {
40b97a2e 2061 error ("second argument to %<__builtin_prefetch%> must be a constant");
ca7fd9cd 2062 arg1 = integer_zero_node;
a9ccbb60 2063 }
84217346 2064 op1 = expand_normal (arg1);
a9ccbb60
JJ
2065 /* Argument 1 must be either zero or one. */
2066 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2067 {
d4ee4d25 2068 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
40b97a2e 2069 " using zero");
a9ccbb60
JJ
2070 op1 = const0_rtx;
2071 }
2072
2073 /* Argument 2 (locality) must be a compile-time constant int. */
2074 if (TREE_CODE (arg2) != INTEGER_CST)
2075 {
40b97a2e 2076 error ("third argument to %<__builtin_prefetch%> must be a constant");
a9ccbb60
JJ
2077 arg2 = integer_zero_node;
2078 }
84217346 2079 op2 = expand_normal (arg2);
a9ccbb60
JJ
2080 /* Argument 2 must be 0, 1, 2, or 3. */
2081 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2082 {
d4ee4d25 2083 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
a9ccbb60
JJ
2084 op2 = const0_rtx;
2085 }
2086
134b044d 2087 if (targetm.have_prefetch ())
a9ccbb60 2088 {
99b1c316 2089 class expand_operand ops[3];
a5c7d693
RS
2090
2091 create_address_operand (&ops[0], op0);
2092 create_integer_operand (&ops[1], INTVAL (op1));
2093 create_integer_operand (&ops[2], INTVAL (op2));
134b044d 2094 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
a5c7d693 2095 return;
a9ccbb60 2096 }
ad76cef8 2097
5ab2f7b7
KH
2098 /* Don't do anything with direct references to volatile memory, but
2099 generate code to handle other side effects. */
3c0cb5de 2100 if (!MEM_P (op0) && side_effects_p (op0))
5ab2f7b7 2101 emit_insn (op0);
a9ccbb60
JJ
2102}
2103
3bdf5ad1 2104/* Get a MEM rtx for expression EXP which is the address of an operand
76715c32 2105 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
435bb2a1
JJ
2106 the maximum length of the block of memory that might be accessed or
2107 NULL if unknown. */
3bdf5ad1 2108
28f4ec01 2109static rtx
435bb2a1 2110get_memory_rtx (tree exp, tree len)
28f4ec01 2111{
805903b5
JJ
2112 tree orig_exp = exp;
2113 rtx addr, mem;
805903b5
JJ
2114
2115 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
2116 from its expression, for expr->a.b only <variable>.a.b is recorded. */
2117 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2118 exp = TREE_OPERAND (exp, 0);
2119
2120 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2121 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
8ac61af7 2122
3bdf5ad1 2123 /* Get an expression we can use to find the attributes to assign to MEM.
625ed172 2124 First remove any nops. */
1043771b 2125 while (CONVERT_EXPR_P (exp)
3bdf5ad1
RK
2126 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2127 exp = TREE_OPERAND (exp, 0);
2128
625ed172
MM
2129 /* Build a MEM_REF representing the whole accessed area as a byte blob,
2130 (as builtin stringops may alias with anything). */
2131 exp = fold_build2 (MEM_REF,
2132 build_array_type (char_type_node,
2133 build_range_type (sizetype,
2134 size_one_node, len)),
2135 exp, build_int_cst (ptr_type_node, 0));
2136
2137 /* If the MEM_REF has no acceptable address, try to get the base object
2138 from the original address we got, and build an all-aliasing
2139 unknown-sized access to that one. */
2140 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2141 set_mem_attributes (mem, exp, 0);
2142 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2143 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2144 0))))
343fb412 2145 {
625ed172
MM
2146 exp = build_fold_addr_expr (exp);
2147 exp = fold_build2 (MEM_REF,
2148 build_array_type (char_type_node,
2149 build_range_type (sizetype,
2150 size_zero_node,
2151 NULL)),
2152 exp, build_int_cst (ptr_type_node, 0));
931e6c29 2153 set_mem_attributes (mem, exp, 0);
343fb412 2154 }
625ed172 2155 set_mem_alias_set (mem, 0);
28f4ec01
BS
2156 return mem;
2157}
2158\f
2159/* Built-in functions to perform an untyped call and return. */
2160
fa19795e
RS
2161#define apply_args_mode \
2162 (this_target_builtins->x_apply_args_mode)
2163#define apply_result_mode \
2164 (this_target_builtins->x_apply_result_mode)
28f4ec01 2165
28f4ec01
BS
2166/* Return the size required for the block returned by __builtin_apply_args,
2167 and initialize apply_args_mode. */
2168
2169static int
4682ae04 2170apply_args_size (void)
28f4ec01
BS
2171{
2172 static int size = -1;
cbf5468f
AH
2173 int align;
2174 unsigned int regno;
28f4ec01
BS
2175
2176 /* The values computed by this function never change. */
2177 if (size < 0)
2178 {
2179 /* The first value is the incoming arg-pointer. */
2180 size = GET_MODE_SIZE (Pmode);
2181
2182 /* The second value is the structure value address unless this is
2183 passed as an "invisible" first argument. */
92f6864c 2184 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
2185 size += GET_MODE_SIZE (Pmode);
2186
2187 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2188 if (FUNCTION_ARG_REGNO_P (regno))
2189 {
b660eccf 2190 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
33521f7d 2191
298e6adc 2192 gcc_assert (mode != VOIDmode);
28f4ec01
BS
2193
2194 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2195 if (size % align != 0)
2196 size = CEIL (size, align) * align;
28f4ec01
BS
2197 size += GET_MODE_SIZE (mode);
2198 apply_args_mode[regno] = mode;
2199 }
2200 else
2201 {
b660eccf 2202 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
2203 }
2204 }
2205 return size;
2206}
2207
2208/* Return the size required for the block returned by __builtin_apply,
2209 and initialize apply_result_mode. */
2210
2211static int
4682ae04 2212apply_result_size (void)
28f4ec01
BS
2213{
2214 static int size = -1;
2215 int align, regno;
28f4ec01
BS
2216
2217 /* The values computed by this function never change. */
2218 if (size < 0)
2219 {
2220 size = 0;
2221
2222 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
82f81f18 2223 if (targetm.calls.function_value_regno_p (regno))
28f4ec01 2224 {
b660eccf 2225 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
33521f7d 2226
298e6adc 2227 gcc_assert (mode != VOIDmode);
28f4ec01
BS
2228
2229 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2230 if (size % align != 0)
2231 size = CEIL (size, align) * align;
2232 size += GET_MODE_SIZE (mode);
2233 apply_result_mode[regno] = mode;
2234 }
2235 else
b660eccf 2236 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
28f4ec01
BS
2237
2238 /* Allow targets that use untyped_call and untyped_return to override
2239 the size so that machine-specific information can be stored here. */
2240#ifdef APPLY_RESULT_SIZE
2241 size = APPLY_RESULT_SIZE;
2242#endif
2243 }
2244 return size;
2245}
2246
28f4ec01
BS
2247/* Create a vector describing the result block RESULT. If SAVEP is true,
2248 the result block is used to save the values; otherwise it is used to
2249 restore the values. */
2250
2251static rtx
4682ae04 2252result_vector (int savep, rtx result)
28f4ec01
BS
2253{
2254 int regno, size, align, nelts;
b660eccf 2255 fixed_size_mode mode;
28f4ec01 2256 rtx reg, mem;
f883e0a7 2257 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
8d51ecf8 2258
28f4ec01
BS
2259 size = nelts = 0;
2260 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2261 if ((mode = apply_result_mode[regno]) != VOIDmode)
2262 {
2263 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2264 if (size % align != 0)
2265 size = CEIL (size, align) * align;
2266 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
f4ef873c 2267 mem = adjust_address (result, mode, size);
28f4ec01 2268 savevec[nelts++] = (savep
f7df4a84
RS
2269 ? gen_rtx_SET (mem, reg)
2270 : gen_rtx_SET (reg, mem));
28f4ec01
BS
2271 size += GET_MODE_SIZE (mode);
2272 }
2273 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2274}
28f4ec01
BS
2275
2276/* Save the state required to perform an untyped call with the same
2277 arguments as were passed to the current function. */
2278
2279static rtx
4682ae04 2280expand_builtin_apply_args_1 (void)
28f4ec01 2281{
88e541e1 2282 rtx registers, tem;
28f4ec01 2283 int size, align, regno;
b660eccf 2284 fixed_size_mode mode;
92f6864c 2285 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
28f4ec01
BS
2286
2287 /* Create a block where the arg-pointer, structure value address,
2288 and argument registers can be saved. */
2289 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2290
2291 /* Walk past the arg-pointer and structure value address. */
2292 size = GET_MODE_SIZE (Pmode);
92f6864c 2293 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
28f4ec01
BS
2294 size += GET_MODE_SIZE (Pmode);
2295
2296 /* Save each register used in calling a function to the block. */
2297 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2298 if ((mode = apply_args_mode[regno]) != VOIDmode)
2299 {
28f4ec01
BS
2300 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2301 if (size % align != 0)
2302 size = CEIL (size, align) * align;
2303
2304 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2305
f4ef873c 2306 emit_move_insn (adjust_address (registers, mode, size), tem);
28f4ec01
BS
2307 size += GET_MODE_SIZE (mode);
2308 }
2309
2310 /* Save the arg pointer to the block. */
2e3f842f 2311 tem = copy_to_reg (crtl->args.internal_arg_pointer);
88e541e1 2312 /* We need the pointer as the caller actually passed them to us, not
ac3f5df7
HPN
2313 as we might have pretended they were passed. Make sure it's a valid
2314 operand, as emit_move_insn isn't expected to handle a PLUS. */
581edfa3
TS
2315 if (STACK_GROWS_DOWNWARD)
2316 tem
2317 = force_operand (plus_constant (Pmode, tem,
2318 crtl->args.pretend_args_size),
2319 NULL_RTX);
88e541e1 2320 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
33521f7d 2321
28f4ec01
BS
2322 size = GET_MODE_SIZE (Pmode);
2323
2324 /* Save the structure value address unless this is passed as an
2325 "invisible" first argument. */
61f71b34 2326 if (struct_incoming_value)
45309d28
ML
2327 emit_move_insn (adjust_address (registers, Pmode, size),
2328 copy_to_reg (struct_incoming_value));
28f4ec01
BS
2329
2330 /* Return the address of the block. */
2331 return copy_addr_to_reg (XEXP (registers, 0));
2332}
2333
2334/* __builtin_apply_args returns block of memory allocated on
2335 the stack into which is stored the arg pointer, structure
2336 value address, static chain, and all the registers that might
2337 possibly be used in performing a function call. The code is
2338 moved to the start of the function so the incoming values are
2339 saved. */
5197bd50 2340
28f4ec01 2341static rtx
4682ae04 2342expand_builtin_apply_args (void)
28f4ec01
BS
2343{
2344 /* Don't do __builtin_apply_args more than once in a function.
2345 Save the result of the first call and reuse it. */
2346 if (apply_args_value != 0)
2347 return apply_args_value;
2348 {
2349 /* When this function is called, it means that registers must be
2350 saved on entry to this function. So we migrate the
2351 call to the first insn of this function. */
2352 rtx temp;
28f4ec01
BS
2353
2354 start_sequence ();
2355 temp = expand_builtin_apply_args_1 ();
e67d1102 2356 rtx_insn *seq = get_insns ();
28f4ec01
BS
2357 end_sequence ();
2358
2359 apply_args_value = temp;
2360
2f937369
DM
2361 /* Put the insns after the NOTE that starts the function.
2362 If this is inside a start_sequence, make the outer-level insn
28f4ec01 2363 chain current, so the code is placed at the start of the
1f21b6f4
JJ
2364 function. If internal_arg_pointer is a non-virtual pseudo,
2365 it needs to be placed after the function that initializes
2366 that pseudo. */
28f4ec01 2367 push_topmost_sequence ();
1f21b6f4
JJ
2368 if (REG_P (crtl->args.internal_arg_pointer)
2369 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2370 emit_insn_before (seq, parm_birth_insn);
2371 else
2372 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
28f4ec01
BS
2373 pop_topmost_sequence ();
2374 return temp;
2375 }
2376}
2377
2378/* Perform an untyped call and save the state required to perform an
2379 untyped return of whatever value was returned by the given function. */
2380
2381static rtx
4682ae04 2382expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
28f4ec01
BS
2383{
2384 int size, align, regno;
b660eccf 2385 fixed_size_mode mode;
58f4cf2a
DM
2386 rtx incoming_args, result, reg, dest, src;
2387 rtx_call_insn *call_insn;
28f4ec01
BS
2388 rtx old_stack_level = 0;
2389 rtx call_fusage = 0;
92f6864c 2390 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
28f4ec01 2391
5ae6cd0d 2392 arguments = convert_memory_address (Pmode, arguments);
ce2d32cd 2393
28f4ec01
BS
2394 /* Create a block where the return registers can be saved. */
2395 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2396
28f4ec01
BS
2397 /* Fetch the arg pointer from the ARGUMENTS block. */
2398 incoming_args = gen_reg_rtx (Pmode);
ce2d32cd 2399 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
581edfa3
TS
2400 if (!STACK_GROWS_DOWNWARD)
2401 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2402 incoming_args, 0, OPTAB_LIB_WIDEN);
28f4ec01 2403
9d53e585
JM
2404 /* Push a new argument block and copy the arguments. Do not allow
2405 the (potential) memcpy call below to interfere with our stack
2406 manipulations. */
28f4ec01 2407 do_pending_stack_adjust ();
9d53e585 2408 NO_DEFER_POP;
28f4ec01 2409
f9da5064 2410 /* Save the stack with nonlocal if available. */
4476e1a0 2411 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 2412 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
28f4ec01 2413 else
9eac0f2a 2414 emit_stack_save (SAVE_BLOCK, &old_stack_level);
28f4ec01 2415
316d0b19 2416 /* Allocate a block of memory onto the stack and copy the memory
d3c12306
EB
2417 arguments to the outgoing arguments address. We can pass TRUE
2418 as the 4th argument because we just saved the stack pointer
2419 and will restore it right after the call. */
9e878cf1 2420 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2e3f842f
L
2421
2422 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2423 may have already set current_function_calls_alloca to true.
2424 current_function_calls_alloca won't be set if argsize is zero,
2425 so we have to guarantee need_drap is true here. */
2426 if (SUPPORTS_STACK_ALIGNMENT)
2427 crtl->need_drap = true;
2428
316d0b19 2429 dest = virtual_outgoing_args_rtx;
581edfa3
TS
2430 if (!STACK_GROWS_DOWNWARD)
2431 {
2432 if (CONST_INT_P (argsize))
2433 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2434 else
2435 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2436 }
8ac61af7
RK
2437 dest = gen_rtx_MEM (BLKmode, dest);
2438 set_mem_align (dest, PARM_BOUNDARY);
2439 src = gen_rtx_MEM (BLKmode, incoming_args);
2440 set_mem_align (src, PARM_BOUNDARY);
44bb111a 2441 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
28f4ec01
BS
2442
2443 /* Refer to the argument block. */
2444 apply_args_size ();
2445 arguments = gen_rtx_MEM (BLKmode, arguments);
8ac61af7 2446 set_mem_align (arguments, PARM_BOUNDARY);
28f4ec01
BS
2447
2448 /* Walk past the arg-pointer and structure value address. */
2449 size = GET_MODE_SIZE (Pmode);
61f71b34 2450 if (struct_value)
28f4ec01
BS
2451 size += GET_MODE_SIZE (Pmode);
2452
2453 /* Restore each of the registers previously saved. Make USE insns
2454 for each of these registers for use in making the call. */
2455 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2456 if ((mode = apply_args_mode[regno]) != VOIDmode)
2457 {
2458 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2459 if (size % align != 0)
2460 size = CEIL (size, align) * align;
2461 reg = gen_rtx_REG (mode, regno);
f4ef873c 2462 emit_move_insn (reg, adjust_address (arguments, mode, size));
28f4ec01
BS
2463 use_reg (&call_fusage, reg);
2464 size += GET_MODE_SIZE (mode);
2465 }
2466
2467 /* Restore the structure value address unless this is passed as an
2468 "invisible" first argument. */
2469 size = GET_MODE_SIZE (Pmode);
61f71b34 2470 if (struct_value)
28f4ec01
BS
2471 {
2472 rtx value = gen_reg_rtx (Pmode);
f4ef873c 2473 emit_move_insn (value, adjust_address (arguments, Pmode, size));
61f71b34 2474 emit_move_insn (struct_value, value);
f8cfc6aa 2475 if (REG_P (struct_value))
61f71b34 2476 use_reg (&call_fusage, struct_value);
28f4ec01
BS
2477 }
2478
2479 /* All arguments and registers used for the call are set up by now! */
531ca746 2480 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
28f4ec01
BS
2481
2482 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
2483 and we don't want to load it into a register as an optimization,
2484 because prepare_call_address already did it if it should be done. */
2485 if (GET_CODE (function) != SYMBOL_REF)
2486 function = memory_address (FUNCTION_MODE, function);
2487
2488 /* Generate the actual call instruction and save the return value. */
43c7dca8
RS
2489 if (targetm.have_untyped_call ())
2490 {
2491 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2492 emit_call_insn (targetm.gen_untyped_call (mem, result,
2493 result_vector (1, result)));
2494 }
58d745ec 2495 else if (targetm.have_call_value ())
28f4ec01
BS
2496 {
2497 rtx valreg = 0;
2498
2499 /* Locate the unique return register. It is not possible to
2500 express a call that sets more than one return register using
2501 call_value; use untyped_call for that. In fact, untyped_call
2502 only needs to save the return registers in the given block. */
2503 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2504 if ((mode = apply_result_mode[regno]) != VOIDmode)
2505 {
58d745ec 2506 gcc_assert (!valreg); /* have_untyped_call required. */
5906d013 2507
28f4ec01
BS
2508 valreg = gen_rtx_REG (mode, regno);
2509 }
2510
58d745ec
RS
2511 emit_insn (targetm.gen_call_value (valreg,
2512 gen_rtx_MEM (FUNCTION_MODE, function),
2513 const0_rtx, NULL_RTX, const0_rtx));
28f4ec01 2514
f4ef873c 2515 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
28f4ec01
BS
2516 }
2517 else
298e6adc 2518 gcc_unreachable ();
28f4ec01 2519
ee960939
OH
2520 /* Find the CALL insn we just emitted, and attach the register usage
2521 information. */
2522 call_insn = last_call_insn ();
2523 add_function_usage_to (call_insn, call_fusage);
28f4ec01
BS
2524
2525 /* Restore the stack. */
4476e1a0 2526 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 2527 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
28f4ec01 2528 else
9eac0f2a 2529 emit_stack_restore (SAVE_BLOCK, old_stack_level);
c3284718 2530 fixup_args_size_notes (call_insn, get_last_insn (), 0);
28f4ec01 2531
9d53e585
JM
2532 OK_DEFER_POP;
2533
28f4ec01 2534 /* Return the address of the result block. */
5ae6cd0d
MM
2535 result = copy_addr_to_reg (XEXP (result, 0));
2536 return convert_memory_address (ptr_mode, result);
28f4ec01
BS
2537}
2538
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers written earlier by expand_builtin_apply;
   restore each hard return register from it and jump to the function
   epilogue.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target provides an untyped_return pattern, let it do the
     whole job in one insn.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  SIZE
     tracks the offset into the save block, padded to each mode's
     alignment to mirror the layout used when the block was written.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE of each restored register so the restores
	   are not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
2587
ad82abb8 2588/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 2589
ad82abb8 2590static enum type_class
4682ae04 2591type_to_class (tree type)
ad82abb8
ZW
2592{
2593 switch (TREE_CODE (type))
2594 {
2595 case VOID_TYPE: return void_type_class;
2596 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
2597 case ENUMERAL_TYPE: return enumeral_type_class;
2598 case BOOLEAN_TYPE: return boolean_type_class;
2599 case POINTER_TYPE: return pointer_type_class;
2600 case REFERENCE_TYPE: return reference_type_class;
2601 case OFFSET_TYPE: return offset_type_class;
2602 case REAL_TYPE: return real_type_class;
2603 case COMPLEX_TYPE: return complex_type_class;
2604 case FUNCTION_TYPE: return function_type_class;
2605 case METHOD_TYPE: return method_type_class;
2606 case RECORD_TYPE: return record_type_class;
2607 case UNION_TYPE:
2608 case QUAL_UNION_TYPE: return union_type_class;
2609 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2610 ? string_type_class : array_type_class);
ad82abb8 2611 case LANG_TYPE: return lang_type_class;
1e2d8575 2612 case OPAQUE_TYPE: return opaque_type_class;
ad82abb8
ZW
2613 default: return no_type_class;
2614 }
2615}
8d51ecf8 2616
5039610b 2617/* Expand a call EXP to __builtin_classify_type. */
5197bd50 2618
28f4ec01 2619static rtx
5039610b 2620expand_builtin_classify_type (tree exp)
28f4ec01 2621{
5039610b
SL
2622 if (call_expr_nargs (exp))
2623 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
2624 return GEN_INT (no_type_class);
2625}
2626
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.
   Each macro expands to case labels plus assignments to the fcode*
   locals declared in mathfn_built_in_2 below; they must only be used
   inside that function's switch.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix.  Used for the
   reentrant gamma_r/lgamma_r family, which has no _Float<N> variants.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 2654
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  /* The _Float<N>/_Float<N>X codes stay END_BUILTINS unless FN hits a
     CASE_MATHFN_FLOATN case below, so functions without those variants
     naturally map to END_BUILTINS for those types.  */
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
  /* The case list is a macro so mathfn_built_in_type below can reuse
     exactly the same set of functions with different CASE_* macros.  */
#define SEQ_OF_CASE_MATHFN			\
    CASE_MATHFN (ACOS) \
    CASE_MATHFN (ACOSH) \
    CASE_MATHFN (ASIN) \
    CASE_MATHFN (ASINH) \
    CASE_MATHFN (ATAN) \
    CASE_MATHFN (ATAN2) \
    CASE_MATHFN (ATANH) \
    CASE_MATHFN (CBRT) \
    CASE_MATHFN_FLOATN (CEIL) \
    CASE_MATHFN (CEXPI) \
    CASE_MATHFN_FLOATN (COPYSIGN) \
    CASE_MATHFN (COS) \
    CASE_MATHFN (COSH) \
    CASE_MATHFN (DREM) \
    CASE_MATHFN (ERF) \
    CASE_MATHFN (ERFC) \
    CASE_MATHFN (EXP) \
    CASE_MATHFN (EXP10) \
    CASE_MATHFN (EXP2) \
    CASE_MATHFN (EXPM1) \
    CASE_MATHFN (FABS) \
    CASE_MATHFN (FDIM) \
    CASE_MATHFN_FLOATN (FLOOR) \
    CASE_MATHFN_FLOATN (FMA) \
    CASE_MATHFN_FLOATN (FMAX) \
    CASE_MATHFN_FLOATN (FMIN) \
    CASE_MATHFN (FMOD) \
    CASE_MATHFN (FREXP) \
    CASE_MATHFN (GAMMA) \
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
    CASE_MATHFN (HUGE_VAL) \
    CASE_MATHFN (HYPOT) \
    CASE_MATHFN (ILOGB) \
    CASE_MATHFN (ICEIL) \
    CASE_MATHFN (IFLOOR) \
    CASE_MATHFN (INF) \
    CASE_MATHFN (IRINT) \
    CASE_MATHFN (IROUND) \
    CASE_MATHFN (ISINF) \
    CASE_MATHFN (J0) \
    CASE_MATHFN (J1) \
    CASE_MATHFN (JN) \
    CASE_MATHFN (LCEIL) \
    CASE_MATHFN (LDEXP) \
    CASE_MATHFN (LFLOOR) \
    CASE_MATHFN (LGAMMA) \
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
    CASE_MATHFN (LLCEIL) \
    CASE_MATHFN (LLFLOOR) \
    CASE_MATHFN (LLRINT) \
    CASE_MATHFN (LLROUND) \
    CASE_MATHFN (LOG) \
    CASE_MATHFN (LOG10) \
    CASE_MATHFN (LOG1P) \
    CASE_MATHFN (LOG2) \
    CASE_MATHFN (LOGB) \
    CASE_MATHFN (LRINT) \
    CASE_MATHFN (LROUND) \
    CASE_MATHFN (MODF) \
    CASE_MATHFN (NAN) \
    CASE_MATHFN (NANS) \
    CASE_MATHFN_FLOATN (NEARBYINT) \
    CASE_MATHFN (NEXTAFTER) \
    CASE_MATHFN (NEXTTOWARD) \
    CASE_MATHFN (POW) \
    CASE_MATHFN (POWI) \
    CASE_MATHFN (POW10) \
    CASE_MATHFN (REMAINDER) \
    CASE_MATHFN (REMQUO) \
    CASE_MATHFN_FLOATN (RINT) \
    CASE_MATHFN_FLOATN (ROUND) \
    CASE_MATHFN_FLOATN (ROUNDEVEN) \
    CASE_MATHFN (SCALB) \
    CASE_MATHFN (SCALBLN) \
    CASE_MATHFN (SCALBN) \
    CASE_MATHFN (SIGNBIT) \
    CASE_MATHFN (SIGNIFICAND) \
    CASE_MATHFN (SIN) \
    CASE_MATHFN (SINCOS) \
    CASE_MATHFN (SINH) \
    CASE_MATHFN_FLOATN (SQRT) \
    CASE_MATHFN (TAN) \
    CASE_MATHFN (TANH) \
    CASE_MATHFN (TGAMMA) \
    CASE_MATHFN_FLOATN (TRUNC) \
    CASE_MATHFN (Y0) \
    CASE_MATHFN (Y1) \
    CASE_MATHFN (YN)

    SEQ_OF_CASE_MATHFN

    default:
      return END_BUILTINS;
    }

  /* Dispatch on the main variant so qualified/typedef'ed versions of
     the standard floating types still match.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}

/* SEQ_OF_CASE_MATHFN deliberately stays defined for
   mathfn_built_in_type below; only the per-entry macros are reset.  */
#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
5c1a2e63
RS
2800/* Return mathematic function equivalent to FN but operating directly on TYPE,
2801 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2802 otherwise use the explicit declaration. If we can't do the conversion,
2803 return null. */
2804
2805static tree
b03ff92e 2806mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2807{
2808 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2809 if (fcode2 == END_BUILTINS)
5039610b 2810 return NULL_TREE;
e79983f4
MM
2811
2812 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2813 return NULL_TREE;
2814
2815 return builtin_decl_explicit (fcode2);
272f51a3
JH
2816}
2817
b03ff92e 2818/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2819
2820tree
b03ff92e 2821mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2822{
2823 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2824}
2825
b03ff92e
RS
2826/* Like mathfn_built_in_1, but take a built_in_function and
2827 always use the implicit array. */
2828
2829tree
2830mathfn_built_in (tree type, enum built_in_function fn)
2831{
2832 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2833}
2834
/* Return the type associated with a built in function, i.e., the one
   to be passed to mathfn_built_in to get the type-specific
   function.  Returns NULL_TREE for functions outside the math family.  */

tree
mathfn_built_in_type (combined_fn fn)
{
/* Redefine the per-entry macros so that the shared SEQ_OF_CASE_MATHFN
   list (defined in mathfn_built_in_2 above) expands to suffix->type
   mappings instead of suffix->function-code assignments.  */
#define CASE_MATHFN(MATHFN)			\
  case CFN_BUILT_IN_##MATHFN:			\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L:		\
    return long_double_type_node;

#define CASE_MATHFN_FLOATN(MATHFN)		\
  CASE_MATHFN(MATHFN)				\
  case CFN_BUILT_IN_##MATHFN##F16:		\
    return float16_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32:		\
    return float32_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64:		\
    return float64_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128:		\
    return float128_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F32X:		\
    return float32x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F64X:		\
    return float64x_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F128X:		\
    return float128x_type_node;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN)		\
  case CFN_BUILT_IN_##MATHFN##_R:		\
    return double_type_node;			\
  case CFN_BUILT_IN_##MATHFN##F_R:		\
    return float_type_node;			\
  case CFN_BUILT_IN_##MATHFN##L_R:		\
    return long_double_type_node;

  switch (fn)
    {
    SEQ_OF_CASE_MATHFN

    default:
      return NULL_TREE;
    }

#undef CASE_MATHFN
#undef CASE_MATHFN_FLOATN
#undef CASE_MATHFN_REENT
#undef SEQ_OF_CASE_MATHFN
}
2889
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* Generate one case per DEF_INTERNAL_*_FN entry by re-expanding
       internal-fn.def with these temporary macro definitions.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* Aliases not covered by internal-fn.def: pow10 is exp10, drem is
       remainder.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    /* scalbn/scalbln scale by a power of the radix; that is ldexp only
       when the return type's format is binary (radix 2).  */
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2927
2928/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2929 on the current target by a call to an internal function, return the
2930 code of that internal function, otherwise return IFN_LAST. The caller
2931 is responsible for ensuring that any side-effects of the built-in
2932 call are dealt with correctly. E.g. if CALL sets errno, the caller
2933 must decide that the errno result isn't needed or make it available
2934 in some other way. */
2935
2936internal_fn
2937replacement_internal_fn (gcall *call)
2938{
2939 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2940 {
2941 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2942 if (ifn != IFN_LAST)
2943 {
2944 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2945 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2946 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2947 return ifn;
2948 }
2949 }
2950 return IFN_LAST;
2951}
2952
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  The SAVE_EXPRs keep the
     arguments' side effects from running twice if we fall back to a
     library call below.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
3027
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* The sincos pattern produces both values; request only the
	     one this builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
3128
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      /* ilogb may need to set errno (EDOM for 0/NaN/Inf arguments).  */
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
3172
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Remember the unsaved argument so EXP can be restored if
	 expansion fails and the caller emits a normal call instead.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: roll back any insns emitted since LAST and
	 undo the SAVE_EXPR rewrite of the argument.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
3223
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function: sincos (x, &sin_result, &cos_result).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs to dereference the sin/cos output pointers.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
3277
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Tries, in order: the target's
   sincos optab, a libcall to sincos, and finally a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Temporaries to receive the sin and cos values, passed to
	 sincos by address.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i); build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (x) + sin (x)*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
3386
44e10129
MM
3387/* Conveniently construct a function call expression. FNDECL names the
3388 function to be called, N is the number of arguments, and the "..."
3389 parameters are the argument expressions. Unlike build_call_exr
3390 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3391
3392static tree
3393build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3394{
3395 va_list ap;
3396 tree fntype = TREE_TYPE (fndecl);
3397 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3398
3399 va_start (ap, n);
3400 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3401 va_end (ap);
3402 SET_EXPR_LOCATION (fn, loc);
3403 return fn;
3404}
44e10129 3405
0bfa1541
RG
3406/* Expand a call to one of the builtin rounding functions gcc defines
3407 as an extension (lfloor and lceil). As these are gcc extensions we
3408 do not need to worry about setting errno to EDOM.
d8b42d06
UB
3409 If expanding via optab fails, lower expression to (int)(floor(x)).
3410 EXP is the expression that is a call to the builtin function;
1856c8dc 3411 if convenient, the result should be placed in TARGET. */
d8b42d06
UB
3412
3413static rtx
1856c8dc 3414expand_builtin_int_roundingfn (tree exp, rtx target)
d8b42d06 3415{
c3a4177f 3416 convert_optab builtin_optab;
58f4cf2a
DM
3417 rtx op0, tmp;
3418 rtx_insn *insns;
d8b42d06 3419 tree fndecl = get_callee_fndecl (exp);
d8b42d06
UB
3420 enum built_in_function fallback_fn;
3421 tree fallback_fndecl;
ef4bddc2 3422 machine_mode mode;
968fc3b6 3423 tree arg;
d8b42d06 3424
5039610b 3425 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1c178a5a 3426 return NULL_RTX;
d8b42d06 3427
5039610b 3428 arg = CALL_EXPR_ARG (exp, 0);
d8b42d06
UB
3429
3430 switch (DECL_FUNCTION_CODE (fndecl))
3431 {
6c32ee74 3432 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
3433 CASE_FLT_FN (BUILT_IN_LCEIL):
3434 CASE_FLT_FN (BUILT_IN_LLCEIL):
f94b1661
UB
3435 builtin_optab = lceil_optab;
3436 fallback_fn = BUILT_IN_CEIL;
3437 break;
3438
6c32ee74 3439 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627
VR
3440 CASE_FLT_FN (BUILT_IN_LFLOOR):
3441 CASE_FLT_FN (BUILT_IN_LLFLOOR):
d8b42d06
UB
3442 builtin_optab = lfloor_optab;
3443 fallback_fn = BUILT_IN_FLOOR;
3444 break;
3445
3446 default:
3447 gcc_unreachable ();
3448 }
3449
3450 /* Make a suitable register to place result in. */
3451 mode = TYPE_MODE (TREE_TYPE (exp));
3452
c3a4177f 3453 target = gen_reg_rtx (mode);
d8b42d06 3454
c3a4177f
RG
3455 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3456 need to expand the argument again. This way, we will not perform
3457 side-effects more the once. */
5799f732 3458 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
d8b42d06 3459
1856c8dc 3460 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
d8b42d06 3461
c3a4177f 3462 start_sequence ();
d8b42d06 3463
c3a4177f
RG
3464 /* Compute into TARGET. */
3465 if (expand_sfix_optab (target, op0, builtin_optab))
3466 {
3467 /* Output the entire sequence. */
3468 insns = get_insns ();
d8b42d06 3469 end_sequence ();
c3a4177f
RG
3470 emit_insn (insns);
3471 return target;
d8b42d06
UB
3472 }
3473
c3a4177f
RG
3474 /* If we were unable to expand via the builtin, stop the sequence
3475 (without outputting the insns). */
3476 end_sequence ();
3477
d8b42d06
UB
3478 /* Fall back to floating point rounding optab. */
3479 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
34a24c11
RG
3480
3481 /* For non-C99 targets we may end up without a fallback fndecl here
3482 if the user called __builtin_lfloor directly. In this case emit
3483 a call to the floor/ceil variants nevertheless. This should result
3484 in the best user experience for not full C99 targets. */
3485 if (fallback_fndecl == NULL_TREE)
3486 {
3487 tree fntype;
3488 const char *name = NULL;
3489
3490 switch (DECL_FUNCTION_CODE (fndecl))
3491 {
6c32ee74 3492 case BUILT_IN_ICEIL:
34a24c11
RG
3493 case BUILT_IN_LCEIL:
3494 case BUILT_IN_LLCEIL:
3495 name = "ceil";
3496 break;
6c32ee74 3497 case BUILT_IN_ICEILF:
34a24c11
RG
3498 case BUILT_IN_LCEILF:
3499 case BUILT_IN_LLCEILF:
3500 name = "ceilf";
3501 break;
6c32ee74 3502 case BUILT_IN_ICEILL:
34a24c11
RG
3503 case BUILT_IN_LCEILL:
3504 case BUILT_IN_LLCEILL:
3505 name = "ceill";
3506 break;
6c32ee74 3507 case BUILT_IN_IFLOOR:
34a24c11
RG
3508 case BUILT_IN_LFLOOR:
3509 case BUILT_IN_LLFLOOR:
3510 name = "floor";
3511 break;
6c32ee74 3512 case BUILT_IN_IFLOORF:
34a24c11
RG
3513 case BUILT_IN_LFLOORF:
3514 case BUILT_IN_LLFLOORF:
3515 name = "floorf";
3516 break;
6c32ee74 3517 case BUILT_IN_IFLOORL:
34a24c11
RG
3518 case BUILT_IN_LFLOORL:
3519 case BUILT_IN_LLFLOORL:
3520 name = "floorl";
3521 break;
3522 default:
3523 gcc_unreachable ();
3524 }
3525
3526 fntype = build_function_type_list (TREE_TYPE (arg),
3527 TREE_TYPE (arg), NULL_TREE);
3528 fallback_fndecl = build_fn_decl (name, fntype);
3529 }
3530
aa493694 3531 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
d8b42d06 3532
39b1ec97 3533 tmp = expand_normal (exp);
9a002da8 3534 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
d8b42d06
UB
3535
3536 /* Truncate the result of floating point optab to integer
3537 via expand_fix (). */
3538 target = gen_reg_rtx (mode);
3539 expand_fix (target, tmp, 0);
3540
3541 return target;
3542}
3543
0bfa1541
RG
3544/* Expand a call to one of the builtin math functions doing integer
3545 conversion (lrint).
3546 Return 0 if a normal call should be emitted rather than expanding the
3547 function in-line. EXP is the expression that is a call to the builtin
1856c8dc 3548 function; if convenient, the result should be placed in TARGET. */
0bfa1541
RG
3549
3550static rtx
1856c8dc 3551expand_builtin_int_roundingfn_2 (tree exp, rtx target)
0bfa1541 3552{
bb7f0423 3553 convert_optab builtin_optab;
58f4cf2a
DM
3554 rtx op0;
3555 rtx_insn *insns;
0bfa1541 3556 tree fndecl = get_callee_fndecl (exp);
968fc3b6 3557 tree arg;
ef4bddc2 3558 machine_mode mode;
ff63ac4d 3559 enum built_in_function fallback_fn = BUILT_IN_NONE;
0bfa1541 3560
5039610b 3561 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1c178a5a 3562 return NULL_RTX;
b8698a0f 3563
5039610b 3564 arg = CALL_EXPR_ARG (exp, 0);
0bfa1541
RG
3565
3566 switch (DECL_FUNCTION_CODE (fndecl))
3567 {
6c32ee74 3568 CASE_FLT_FN (BUILT_IN_IRINT):
ff63ac4d 3569 fallback_fn = BUILT_IN_LRINT;
81fea426 3570 gcc_fallthrough ();
0bfa1541
RG
3571 CASE_FLT_FN (BUILT_IN_LRINT):
3572 CASE_FLT_FN (BUILT_IN_LLRINT):
ff63ac4d
JJ
3573 builtin_optab = lrint_optab;
3574 break;
6c32ee74
UB
3575
3576 CASE_FLT_FN (BUILT_IN_IROUND):
ff63ac4d 3577 fallback_fn = BUILT_IN_LROUND;
81fea426 3578 gcc_fallthrough ();
4d81bf84
RG
3579 CASE_FLT_FN (BUILT_IN_LROUND):
3580 CASE_FLT_FN (BUILT_IN_LLROUND):
ff63ac4d
JJ
3581 builtin_optab = lround_optab;
3582 break;
6c32ee74 3583
0bfa1541
RG
3584 default:
3585 gcc_unreachable ();
3586 }
3587
ff63ac4d
JJ
3588 /* There's no easy way to detect the case we need to set EDOM. */
3589 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3590 return NULL_RTX;
3591
0bfa1541
RG
3592 /* Make a suitable register to place result in. */
3593 mode = TYPE_MODE (TREE_TYPE (exp));
3594
ff63ac4d
JJ
3595 /* There's no easy way to detect the case we need to set EDOM. */
3596 if (!flag_errno_math)
3597 {
04b80dbb 3598 rtx result = gen_reg_rtx (mode);
0bfa1541 3599
ff63ac4d
JJ
3600 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3601 need to expand the argument again. This way, we will not perform
3602 side-effects more the once. */
3603 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
0bfa1541 3604
ff63ac4d 3605 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
0bfa1541 3606
ff63ac4d 3607 start_sequence ();
0bfa1541 3608
04b80dbb 3609 if (expand_sfix_optab (result, op0, builtin_optab))
ff63ac4d
JJ
3610 {
3611 /* Output the entire sequence. */
3612 insns = get_insns ();
3613 end_sequence ();
3614 emit_insn (insns);
04b80dbb 3615 return result;
ff63ac4d
JJ
3616 }
3617
3618 /* If we were unable to expand via the builtin, stop the sequence
3619 (without outputting the insns) and call to the library function
3620 with the stabilized argument list. */
0bfa1541
RG
3621 end_sequence ();
3622 }
3623
ff63ac4d
JJ
3624 if (fallback_fn != BUILT_IN_NONE)
3625 {
3626 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3627 targets, (int) round (x) should never be transformed into
3628 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3629 a call to lround in the hope that the target provides at least some
3630 C99 functions. This should result in the best user experience for
3631 not full C99 targets. */
b03ff92e
RS
3632 tree fallback_fndecl = mathfn_built_in_1
3633 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
ff63ac4d
JJ
3634
3635 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3636 fallback_fndecl, 1, arg);
3637
3638 target = expand_call (exp, NULL_RTX, target == const0_rtx);
9a002da8 3639 target = maybe_emit_group_store (target, TREE_TYPE (exp));
ff63ac4d
JJ
3640 return convert_to_mode (mode, target, 0);
3641 }
bb7f0423 3642
04b80dbb 3643 return expand_call (exp, target, target == const0_rtx);
0bfa1541
RG
3644}
3645
5039610b 3646/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
17684d46
RG
3647 a normal call should be emitted rather than expanding the function
3648 in-line. EXP is the expression that is a call to the builtin
3649 function; if convenient, the result should be placed in TARGET. */
3650
3651static rtx
4359dc2a 3652expand_builtin_powi (tree exp, rtx target)
17684d46 3653{
17684d46
RG
3654 tree arg0, arg1;
3655 rtx op0, op1;
ef4bddc2
RS
3656 machine_mode mode;
3657 machine_mode mode2;
17684d46 3658
5039610b
SL
3659 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3660 return NULL_RTX;
17684d46 3661
5039610b
SL
3662 arg0 = CALL_EXPR_ARG (exp, 0);
3663 arg1 = CALL_EXPR_ARG (exp, 1);
17684d46
RG
3664 mode = TYPE_MODE (TREE_TYPE (exp));
3665
17684d46
RG
3666 /* Emit a libcall to libgcc. */
3667
5039610b 3668 /* Mode of the 2nd argument must match that of an int. */
f4b31647 3669 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
0b8495ae 3670
17684d46
RG
3671 if (target == NULL_RTX)
3672 target = gen_reg_rtx (mode);
3673
4359dc2a 3674 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
17684d46
RG
3675 if (GET_MODE (op0) != mode)
3676 op0 = convert_to_mode (mode, op0, 0);
49452c07 3677 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
0b8495ae
FJ
3678 if (GET_MODE (op1) != mode2)
3679 op1 = convert_to_mode (mode2, op1, 0);
17684d46 3680
8a33f100 3681 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
db69559b 3682 target, LCT_CONST, mode,
0b8495ae 3683 op0, mode, op1, mode2);
17684d46
RG
3684
3685 return target;
3686}
3687
b8698a0f 3688/* Expand expression EXP which is a call to the strlen builtin. Return
781ff3d8 3689 NULL_RTX if we failed and the caller should emit a normal call, otherwise
0e9295cf 3690 try to get the result in TARGET, if convenient. */
3bdf5ad1 3691
28f4ec01 3692static rtx
5039610b 3693expand_builtin_strlen (tree exp, rtx target,
ef4bddc2 3694 machine_mode target_mode)
28f4ec01 3695{
5039610b
SL
3696 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3697 return NULL_RTX;
712b7a05 3698
16155777 3699 tree src = CALL_EXPR_ARG (exp, 0);
d14c547a
MS
3700 if (!check_read_access (exp, src))
3701 return NULL_RTX;
ae808627 3702
16155777 3703 /* If the length can be computed at compile-time, return it. */
d14c547a 3704 if (tree len = c_strlen (src, 0))
16155777
MS
3705 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3706
3707 /* If the length can be computed at compile-time and is constant
3708 integer, but there are side-effects in src, evaluate
3709 src for side-effects, then return len.
3710 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3711 can be optimized into: i++; x = 3; */
d14c547a 3712 tree len = c_strlen (src, 1);
16155777
MS
3713 if (len && TREE_CODE (len) == INTEGER_CST)
3714 {
3715 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3716 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3717 }
28f4ec01 3718
d14c547a 3719 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
28f4ec01 3720
16155777
MS
3721 /* If SRC is not a pointer type, don't do this operation inline. */
3722 if (align == 0)
3723 return NULL_RTX;
3724
3725 /* Bail out if we can't compute strlen in the right mode. */
d14c547a
MS
3726 machine_mode insn_mode;
3727 enum insn_code icode = CODE_FOR_nothing;
16155777
MS
3728 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3729 {
3730 icode = optab_handler (strlen_optab, insn_mode);
3731 if (icode != CODE_FOR_nothing)
3732 break;
3733 }
3734 if (insn_mode == VOIDmode)
3735 return NULL_RTX;
28f4ec01 3736
16155777
MS
3737 /* Make a place to hold the source address. We will not expand
3738 the actual source until we are sure that the expansion will
3739 not fail -- there are trees that cannot be expanded twice. */
d14c547a 3740 rtx src_reg = gen_reg_rtx (Pmode);
28f4ec01 3741
16155777
MS
3742 /* Mark the beginning of the strlen sequence so we can emit the
3743 source operand later. */
d14c547a 3744 rtx_insn *before_strlen = get_last_insn ();
28f4ec01 3745
d14c547a 3746 class expand_operand ops[4];
16155777
MS
3747 create_output_operand (&ops[0], target, insn_mode);
3748 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3749 create_integer_operand (&ops[2], 0);
3750 create_integer_operand (&ops[3], align);
3751 if (!maybe_expand_insn (icode, 4, ops))
3752 return NULL_RTX;
dd05e4fa 3753
16155777
MS
3754 /* Check to see if the argument was declared attribute nonstring
3755 and if so, issue a warning since at this point it's not known
3756 to be nul-terminated. */
3757 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6a33d0ff 3758
16155777
MS
3759 /* Now that we are assured of success, expand the source. */
3760 start_sequence ();
d14c547a 3761 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
16155777
MS
3762 if (pat != src_reg)
3763 {
fa465762 3764#ifdef POINTERS_EXTEND_UNSIGNED
16155777
MS
3765 if (GET_MODE (pat) != Pmode)
3766 pat = convert_to_mode (Pmode, pat,
3767 POINTERS_EXTEND_UNSIGNED);
fa465762 3768#endif
16155777
MS
3769 emit_move_insn (src_reg, pat);
3770 }
3771 pat = get_insns ();
3772 end_sequence ();
fca9f642 3773
16155777
MS
3774 if (before_strlen)
3775 emit_insn_after (pat, before_strlen);
3776 else
3777 emit_insn_before (pat, get_insns ());
28f4ec01 3778
16155777
MS
3779 /* Return the value in the proper mode for this function. */
3780 if (GET_MODE (ops[0].value) == target_mode)
3781 target = ops[0].value;
3782 else if (target != 0)
3783 convert_move (target, ops[0].value, 0);
3784 else
3785 target = convert_to_mode (target_mode, ops[0].value, 0);
dd05e4fa 3786
16155777 3787 return target;
28f4ec01
BS
3788}
3789
781ff3d8
MS
3790/* Expand call EXP to the strnlen built-in, returning the result
3791 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3792
3793static rtx
3794expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3795{
3796 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3797 return NULL_RTX;
3798
3799 tree src = CALL_EXPR_ARG (exp, 0);
3800 tree bound = CALL_EXPR_ARG (exp, 1);
3801
3802 if (!bound)
3803 return NULL_RTX;
3804
d14c547a
MS
3805 check_read_access (exp, src, bound);
3806
781ff3d8
MS
3807 location_t loc = UNKNOWN_LOCATION;
3808 if (EXPR_HAS_LOCATION (exp))
3809 loc = EXPR_LOCATION (exp);
3810
1583124e
MS
3811 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3812 so these conversions aren't necessary. */
e09aa5bd
MS
3813 c_strlen_data lendata = { };
3814 tree len = c_strlen (src, 0, &lendata, 1);
1583124e
MS
3815 if (len)
3816 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
781ff3d8
MS
3817
3818 if (TREE_CODE (bound) == INTEGER_CST)
3819 {
6c4aa5f6 3820 if (!len)
781ff3d8
MS
3821 return NULL_RTX;
3822
781ff3d8
MS
3823 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3824 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3825 }
3826
3827 if (TREE_CODE (bound) != SSA_NAME)
3828 return NULL_RTX;
3829
3830 wide_int min, max;
54994253 3831 enum value_range_kind rng = get_range_info (bound, &min, &max);
781ff3d8
MS
3832 if (rng != VR_RANGE)
3833 return NULL_RTX;
3834
781ff3d8 3835 if (!len || TREE_CODE (len) != INTEGER_CST)
f3431652 3836 {
d14c547a 3837 bool exact;
e09aa5bd
MS
3838 lendata.decl = unterminated_array (src, &len, &exact);
3839 if (!lendata.decl)
f3431652
MS
3840 return NULL_RTX;
3841 }
781ff3d8 3842
e09aa5bd 3843 if (lendata.decl)
f3431652
MS
3844 return NULL_RTX;
3845
781ff3d8
MS
3846 if (wi::gtu_p (min, wi::to_wide (len)))
3847 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3848
3849 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3850 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3851}
3852
57814e5e 3853/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
d5803b98
MS
3854 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3855 a target constant. */
57814e5e
JJ
3856
3857static rtx
4682ae04 3858builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 3859 scalar_int_mode mode)
57814e5e 3860{
d5803b98
MS
3861 /* The REPresentation pointed to by DATA need not be a nul-terminated
3862 string but the caller guarantees it's large enough for MODE. */
3863 const char *rep = (const char *) data;
57814e5e 3864
d5803b98 3865 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
57814e5e
JJ
3866}
3867
3918b108 3868/* LEN specify length of the block of memcpy/memset operation.
82bb7d4e
JH
3869 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3870 In some cases we can make very likely guess on max size, then we
3871 set it into PROBABLE_MAX_SIZE. */
3918b108
JH
3872
3873static void
3874determine_block_size (tree len, rtx len_rtx,
3875 unsigned HOST_WIDE_INT *min_size,
82bb7d4e
JH
3876 unsigned HOST_WIDE_INT *max_size,
3877 unsigned HOST_WIDE_INT *probable_max_size)
3918b108
JH
3878{
3879 if (CONST_INT_P (len_rtx))
3880 {
2738b4c7 3881 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3918b108
JH
3882 return;
3883 }
3884 else
3885 {
807e902e 3886 wide_int min, max;
54994253 3887 enum value_range_kind range_type = VR_UNDEFINED;
82bb7d4e
JH
3888
3889 /* Determine bounds from the type. */
3890 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3891 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3892 else
3893 *min_size = 0;
3894 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2738b4c7
JJ
3895 *probable_max_size = *max_size
3896 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
82bb7d4e
JH
3897 else
3898 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3899
3900 if (TREE_CODE (len) == SSA_NAME)
3901 range_type = get_range_info (len, &min, &max);
3902 if (range_type == VR_RANGE)
3918b108 3903 {
807e902e 3904 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3918b108 3905 *min_size = min.to_uhwi ();
807e902e 3906 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
82bb7d4e 3907 *probable_max_size = *max_size = max.to_uhwi ();
3918b108 3908 }
82bb7d4e 3909 else if (range_type == VR_ANTI_RANGE)
3918b108 3910 {
82bb7d4e
JH
3911 /* Code like
3912
3913 int n;
3914 if (n < 100)
70ec86ee 3915 memcpy (a, b, n)
82bb7d4e
JH
3916
3917 Produce anti range allowing negative values of N. We still
3918 can use the information and make a guess that N is not negative.
3919 */
f21757eb 3920 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
807e902e 3921 *probable_max_size = min.to_uhwi () - 1;
3918b108
JH
3922 }
3923 }
3924 gcc_checking_assert (*max_size <=
3925 (unsigned HOST_WIDE_INT)
3926 GET_MODE_MASK (GET_MODE (len_rtx)));
3927}
3928
d14c547a
MS
3929/* Issue a warning OPT for a bounded call EXP with a bound in RANGE
3930 accessing an object with SIZE. */
3931
3932static bool
3933maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3934 tree bndrng[2], tree size, const access_data *pad = NULL)
3935{
3936 if (!bndrng[0] || TREE_NO_WARNING (exp))
3937 return false;
3938
3939 tree maxobjsize = max_object_size ();
3940
3941 bool warned = false;
3942
3943 if (opt == OPT_Wstringop_overread)
3944 {
eafe8ee7
MS
3945 bool maybe = pad && pad->src.phi ();
3946
d14c547a
MS
3947 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3948 {
3949 if (bndrng[0] == bndrng[1])
3950 warned = (func
3951 ? warning_at (loc, opt,
eafe8ee7
MS
3952 (maybe
3953 ? G_("%K%qD specified bound %E may "
3954 "exceed maximum object size %E")
3955 : G_("%K%qD specified bound %E "
3956 "exceeds maximum object size %E")),
d14c547a
MS
3957 exp, func, bndrng[0], maxobjsize)
3958 : warning_at (loc, opt,
eafe8ee7
MS
3959 (maybe
3960 ? G_("%Kspecified bound %E may "
3961 "exceed maximum object size %E")
3962 : G_("%Kspecified bound %E "
3963 "exceeds maximum object size %E")),
d14c547a
MS
3964 exp, bndrng[0], maxobjsize));
3965 else
3966 warned = (func
3967 ? warning_at (loc, opt,
eafe8ee7
MS
3968 (maybe
3969 ? G_("%K%qD specified bound [%E, %E] may "
3970 "exceed maximum object size %E")
3971 : G_("%K%qD specified bound [%E, %E] "
3972 "exceeds maximum object size %E")),
d14c547a
MS
3973 exp, func,
3974 bndrng[0], bndrng[1], maxobjsize)
3975 : warning_at (loc, opt,
eafe8ee7
MS
3976 (maybe
3977 ? G_("%Kspecified bound [%E, %E] may "
3978 "exceed maximum object size %E")
3979 : G_("%Kspecified bound [%E, %E] "
3980 "exceeds maximum object size %E")),
d14c547a
MS
3981 exp, bndrng[0], bndrng[1], maxobjsize));
3982 }
3983 else if (!size || tree_int_cst_le (bndrng[0], size))
3984 return false;
3985 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3986 warned = (func
3987 ? warning_at (loc, opt,
eafe8ee7
MS
3988 (maybe
3989 ? G_("%K%qD specified bound %E may exceed "
3990 "source size %E")
3991 : G_("%K%qD specified bound %E exceeds "
3992 "source size %E")),
d14c547a
MS
3993 exp, func, bndrng[0], size)
3994 : warning_at (loc, opt,
eafe8ee7
MS
3995 (maybe
3996 ? G_("%Kspecified bound %E may exceed "
3997 "source size %E")
3998 : G_("%Kspecified bound %E exceeds "
3999 "source size %E")),
d14c547a
MS
4000 exp, bndrng[0], size));
4001 else
4002 warned = (func
4003 ? warning_at (loc, opt,
eafe8ee7
MS
4004 (maybe
4005 ? G_("%K%qD specified bound [%E, %E] may "
4006 "exceed source size %E")
4007 : G_("%K%qD specified bound [%E, %E] exceeds "
4008 "source size %E")),
d14c547a
MS
4009 exp, func, bndrng[0], bndrng[1], size)
4010 : warning_at (loc, opt,
eafe8ee7
MS
4011 (maybe
4012 ? G_("%Kspecified bound [%E, %E] may exceed "
4013 "source size %E")
4014 : G_("%Kspecified bound [%E, %E] exceeds "
4015 "source size %E")),
d14c547a
MS
4016 exp, bndrng[0], bndrng[1], size));
4017 if (warned)
4018 {
4019 if (pad && pad->src.ref)
4020 {
4021 if (DECL_P (pad->src.ref))
4022 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4023 "source object declared here");
4024 else if (EXPR_HAS_LOCATION (pad->src.ref))
4025 inform (EXPR_LOCATION (pad->src.ref),
4026 "source object allocated here");
4027 }
4028 TREE_NO_WARNING (exp) = true;
4029 }
4030
4031 return warned;
4032 }
4033
eafe8ee7 4034 bool maybe = pad && pad->dst.phi ();
d14c547a
MS
4035 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4036 {
4037 if (bndrng[0] == bndrng[1])
4038 warned = (func
4039 ? warning_at (loc, opt,
eafe8ee7
MS
4040 (maybe
4041 ? G_("%K%qD specified size %E may "
4042 "exceed maximum object size %E")
4043 : G_("%K%qD specified size %E "
4044 "exceeds maximum object size %E")),
d14c547a
MS
4045 exp, func, bndrng[0], maxobjsize)
4046 : warning_at (loc, opt,
eafe8ee7
MS
4047 (maybe
4048 ? G_("%Kspecified size %E may exceed "
4049 "maximum object size %E")
4050 : G_("%Kspecified size %E exceeds "
4051 "maximum object size %E")),
d14c547a
MS
4052 exp, bndrng[0], maxobjsize));
4053 else
4054 warned = (func
4055 ? warning_at (loc, opt,
eafe8ee7
MS
4056 (maybe
4057 ? G_("%K%qD specified size between %E and %E "
4058 "may exceed maximum object size %E")
4059 : G_("%K%qD specified size between %E and %E "
4060 "exceeds maximum object size %E")),
d14c547a
MS
4061 exp, func,
4062 bndrng[0], bndrng[1], maxobjsize)
4063 : warning_at (loc, opt,
eafe8ee7
MS
4064 (maybe
4065 ? G_("%Kspecified size between %E and %E "
4066 "may exceed maximum object size %E")
4067 : G_("%Kspecified size between %E and %E "
4068 "exceeds maximum object size %E")),
d14c547a
MS
4069 exp, bndrng[0], bndrng[1], maxobjsize));
4070 }
4071 else if (!size || tree_int_cst_le (bndrng[0], size))
4072 return false;
4073 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4074 warned = (func
4075 ? warning_at (loc, OPT_Wstringop_overflow_,
eafe8ee7
MS
4076 (maybe
4077 ? G_("%K%qD specified bound %E may exceed "
4078 "destination size %E")
4079 : G_("%K%qD specified bound %E exceeds "
4080 "destination size %E")),
d14c547a
MS
4081 exp, func, bndrng[0], size)
4082 : warning_at (loc, OPT_Wstringop_overflow_,
eafe8ee7
MS
4083 (maybe
4084 ? G_("%Kspecified bound %E may exceed "
4085 "destination size %E")
4086 : G_("%Kspecified bound %E exceeds "
4087 "destination size %E")),
d14c547a
MS
4088 exp, bndrng[0], size));
4089 else
4090 warned = (func
4091 ? warning_at (loc, OPT_Wstringop_overflow_,
eafe8ee7
MS
4092 (maybe
4093 ? G_("%K%qD specified bound [%E, %E] may exceed "
4094 "destination size %E")
4095 : G_("%K%qD specified bound [%E, %E] exceeds "
4096 "destination size %E")),
d14c547a
MS
4097 exp, func, bndrng[0], bndrng[1], size)
4098 : warning_at (loc, OPT_Wstringop_overflow_,
eafe8ee7
MS
4099 (maybe
4100 ? G_("%Kspecified bound [%E, %E] exceeds "
4101 "destination size %E")
4102 : G_("%Kspecified bound [%E, %E] exceeds "
4103 "destination size %E")),
d14c547a
MS
4104 exp, bndrng[0], bndrng[1], size));
4105
4106 if (warned)
4107 {
4108 if (pad && pad->dst.ref)
6edc8f5b
MS
4109 {
4110 if (DECL_P (pad->dst.ref))
4111 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4112 "destination object declared here");
4113 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4114 inform (EXPR_LOCATION (pad->dst.ref),
4115 "destination object allocated here");
4116 }
d14c547a
MS
4117 TREE_NO_WARNING (exp) = true;
4118 }
4119
4120 return warned;
4121}
4122
b825a228 4123/* For an expression EXP issue an access warning controlled by option OPT
baad4c48
MS
4124 with access to a region SIZE bytes in size in the RANGE of sizes.
4125 WRITE is true for a write access, READ for a read access, neither for
4126 call that may or may not perform an access but for which the range
4127 is expected to valid.
4128 Returns true when a warning has been issued. */
b825a228
MS
4129
4130static bool
baad4c48 4131warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
eafe8ee7 4132 tree size, bool write, bool read, bool maybe)
b825a228
MS
4133{
4134 bool warned = false;
4135
baad4c48
MS
4136 if (write && read)
4137 {
4138 if (tree_int_cst_equal (range[0], range[1]))
4139 warned = (func
4140 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
eafe8ee7
MS
4141 (maybe
4142 ? G_("%K%qD may access %E byte in a region "
4143 "of size %E")
4144 : G_("%K%qD accessing %E byte in a region "
4145 "of size %E")),
4146 (maybe
4147 ? G_ ("%K%qD may access %E bytes in a region "
4148 "of size %E")
4149 : G_ ("%K%qD accessing %E bytes in a region "
4150 "of size %E")),
baad4c48
MS
4151 exp, func, range[0], size)
4152 : warning_n (loc, opt, tree_to_uhwi (range[0]),
eafe8ee7
MS
4153 (maybe
4154 ? G_("%Kmay access %E byte in a region "
4155 "of size %E")
4156 : G_("%Kaccessing %E byte in a region "
4157 "of size %E")),
4158 (maybe
4159 ? G_("%Kmay access %E bytes in a region "
4160 "of size %E")
4161 : G_("%Kaccessing %E bytes in a region "
4162 "of size %E")),
baad4c48
MS
4163 exp, range[0], size));
4164 else if (tree_int_cst_sign_bit (range[1]))
4165 {
4166 /* Avoid printing the upper bound if it's invalid. */
4167 warned = (func
4168 ? warning_at (loc, opt,
eafe8ee7
MS
4169 (maybe
4170 ? G_("%K%qD may access %E or more bytes "
4171 "in a region of size %E")
4172 : G_("%K%qD accessing %E or more bytes "
4173 "in a region of size %E")),
baad4c48
MS
4174 exp, func, range[0], size)
4175 : warning_at (loc, opt,
eafe8ee7
MS
4176 (maybe
4177 ? G_("%Kmay access %E or more bytes "
4178 "in a region of size %E")
4179 : G_("%Kaccessing %E or more bytes "
4180 "in a region of size %E")),
baad4c48
MS
4181 exp, range[0], size));
4182 }
4183 else
4184 warned = (func
4185 ? warning_at (loc, opt,
eafe8ee7
MS
4186 (maybe
4187 ? G_("%K%qD may access between %E and %E "
4188 "bytes in a region of size %E")
4189 : G_("%K%qD accessing between %E and %E "
4190 "bytes in a region of size %E")),
baad4c48
MS
4191 exp, func, range[0], range[1],
4192 size)
4193 : warning_at (loc, opt,
eafe8ee7
MS
4194 (maybe
4195 ? G_("%Kmay access between %E and %E bytes "
4196 "in a region of size %E")
4197 : G_("%Kaccessing between %E and %E bytes "
4198 "in a region of size %E")),
baad4c48
MS
4199 exp, range[0], range[1],
4200 size));
4201 return warned;
4202 }
4203
4204 if (write)
4205 {
4206 if (tree_int_cst_equal (range[0], range[1]))
4207 warned = (func
4208 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
eafe8ee7
MS
4209 (maybe
4210 ? G_("%K%qD may write %E byte into a region "
4211 "of size %E")
4212 : G_("%K%qD writing %E byte into a region "
4213 "of size %E overflows the destination")),
4214 (maybe
4215 ? G_("%K%qD may write %E bytes into a region "
4216 "of size %E")
4217 : G_("%K%qD writing %E bytes into a region "
4218 "of size %E overflows the destination")),
baad4c48
MS
4219 exp, func, range[0], size)
4220 : warning_n (loc, opt, tree_to_uhwi (range[0]),
eafe8ee7
MS
4221 (maybe
4222 ? G_("%Kmay write %E byte into a region "
4223 "of size %E")
4224 : G_("%Kwriting %E byte into a region "
4225 "of size %E overflows the destination")),
4226 (maybe
4227 ? G_("%Kmay write %E bytes into a region "
4228 "of size %E")
4229 : G_("%Kwriting %E bytes into a region "
4230 "of size %E overflows the destination")),
baad4c48
MS
4231 exp, range[0], size));
4232 else if (tree_int_cst_sign_bit (range[1]))
4233 {
4234 /* Avoid printing the upper bound if it's invalid. */
4235 warned = (func
4236 ? warning_at (loc, opt,
eafe8ee7
MS
4237 (maybe
4238 ? G_("%K%qD may write %E or more bytes "
4239 "into a region of size %E "
4240 "the destination")
4241 : G_("%K%qD writing %E or more bytes "
4242 "into a region of size %E overflows "
4243 "the destination")),
baad4c48
MS
4244 exp, func, range[0], size)
4245 : warning_at (loc, opt,
eafe8ee7
MS
4246 (maybe
4247 ? G_("%Kmay write %E or more bytes into "
4248 "a region of size %E")
4249 : G_("%Kwriting %E or more bytes into "
4250 "a region of size %E overflows "
4251 "the destination")),
baad4c48
MS
4252 exp, range[0], size));
4253 }
4254 else
4255 warned = (func
4256 ? warning_at (loc, opt,
eafe8ee7
MS
4257 (maybe
4258 ? G_("%K%qD may write between %E and %E bytes "
4259 "into a region of size %E")
4260 : G_("%K%qD writing between %E and %E bytes "
4261 "into a region of size %E overflows "
4262 "the destination")),
baad4c48
MS
4263 exp, func, range[0], range[1],
4264 size)
4265 : warning_at (loc, opt,
eafe8ee7
MS
4266 (maybe
4267 ? G_("%Kmay write between %E and %E bytes "
4268 "into a region of size %E")
4269 : G_("%Kwriting between %E and %E bytes "
4270 "into a region of size %E overflows "
4271 "the destination")),
baad4c48
MS
4272 exp, range[0], range[1],
4273 size));
4274 return warned;
4275 }
4276
4277 if (read)
b825a228
MS
4278 {
4279 if (tree_int_cst_equal (range[0], range[1]))
4280 warned = (func
d14c547a
MS
4281 ? warning_n (loc, OPT_Wstringop_overread,
4282 tree_to_uhwi (range[0]),
eafe8ee7
MS
4283 (maybe
4284 ? G_("%K%qD may reade %E byte from a region "
4285 "of size %E")
4286 : G_("%K%qD reading %E byte from a region "
4287 "of size %E")),
4288 (maybe
4289 ? G_("%K%qD may read %E bytes from a region "
4290 "of size %E")
4291 : G_("%K%qD reading %E bytes from a region "
4292 "of size %E")),
4293 exp, func, range[0], size)
d14c547a
MS
4294 : warning_n (loc, OPT_Wstringop_overread,
4295 tree_to_uhwi (range[0]),
eafe8ee7
MS
4296 (maybe
4297 ? G_("%Kmay read %E byte from a region "
4298 "of size %E")
4299 : G_("%Kreading %E byte from a region "
4300 "of size %E")),
4301 (maybe
4302 ? G_("%Kmay read %E bytes from a region "
4303 "of size %E")
4304 : G_("%Kreading %E bytes from a region "
4305 "of size %E")),
baad4c48 4306 exp, range[0], size));
b825a228
MS
4307 else if (tree_int_cst_sign_bit (range[1]))
4308 {
4309 /* Avoid printing the upper bound if it's invalid. */
4310 warned = (func
d14c547a 4311 ? warning_at (loc, OPT_Wstringop_overread,
eafe8ee7
MS
4312 (maybe
4313 ? G_("%K%qD may read %E or more bytes "
4314 "from a region of size %E")
4315 : G_("%K%qD reading %E or more bytes "
4316 "from a region of size %E")),
baad4c48 4317 exp, func, range[0], size)
d14c547a 4318 : warning_at (loc, OPT_Wstringop_overread,
eafe8ee7
MS
4319 (maybe
4320 ? G_("%Kmay read %E or more bytes "
4321 "from a region of size %E")
4322 : G_("%Kreading %E or more bytes "
4323 "from a region of size %E")),
baad4c48 4324 exp, range[0], size));
b825a228
MS
4325 }
4326 else
4327 warned = (func
d14c547a 4328 ? warning_at (loc, OPT_Wstringop_overread,
eafe8ee7
MS
4329 (maybe
4330 ? G_("%K%qD may read between %E and %E bytes "
4331 "from a region of size %E")
4332 : G_("%K%qD reading between %E and %E bytes "
4333 "from a region of size %E")),
baad4c48
MS
4334 exp, func, range[0], range[1], size)
4335 : warning_at (loc, opt,
eafe8ee7
MS
4336 (maybe
4337 ? G_("%Kmay read between %E and %E bytes "
4338 "from a region of size %E")
4339 : G_("%Kreading between %E and %E bytes "
4340 "from a region of size %E")),
baad4c48 4341 exp, range[0], range[1], size));
b825a228 4342
d14c547a
MS
4343 if (warned)
4344 TREE_NO_WARNING (exp) = true;
4345
b825a228
MS
4346 return warned;
4347 }
4348
baad4c48
MS
4349 if (tree_int_cst_equal (range[0], range[1])
4350 || tree_int_cst_sign_bit (range[1]))
b825a228 4351 warned = (func
d14c547a
MS
4352 ? warning_n (loc, OPT_Wstringop_overread,
4353 tree_to_uhwi (range[0]),
b825a228
MS
4354 "%K%qD epecting %E byte in a region of size %E",
4355 "%K%qD expecting %E bytes in a region of size %E",
baad4c48 4356 exp, func, range[0], size)
d14c547a
MS
4357 : warning_n (loc, OPT_Wstringop_overread,
4358 tree_to_uhwi (range[0]),
b825a228
MS
4359 "%Kexpecting %E byte in a region of size %E",
4360 "%Kexpecting %E bytes in a region of size %E",
baad4c48 4361 exp, range[0], size));
b825a228
MS
4362 else if (tree_int_cst_sign_bit (range[1]))
4363 {
4364 /* Avoid printing the upper bound if it's invalid. */
4365 warned = (func
d14c547a 4366 ? warning_at (loc, OPT_Wstringop_overread,
b825a228
MS
4367 "%K%qD expecting %E or more bytes in a region "
4368 "of size %E",
baad4c48 4369 exp, func, range[0], size)
d14c547a 4370 : warning_at (loc, OPT_Wstringop_overread,
b825a228
MS
4371 "%Kexpecting %E or more bytes in a region "
4372 "of size %E",
baad4c48 4373 exp, range[0], size));
b825a228
MS
4374 }
4375 else
4376 warned = (func
d14c547a 4377 ? warning_at (loc, OPT_Wstringop_overread,
b825a228
MS
4378 "%K%qD expecting between %E and %E bytes in "
4379 "a region of size %E",
baad4c48 4380 exp, func, range[0], range[1], size)
d14c547a 4381 : warning_at (loc, OPT_Wstringop_overread,
b825a228
MS
4382 "%Kexpectting between %E and %E bytes in "
4383 "a region of size %E",
baad4c48 4384 exp, range[0], range[1], size));
d14c547a
MS
4385
4386 if (warned)
4387 TREE_NO_WARNING (exp) = true;
4388
b825a228
MS
4389 return warned;
4390}
4391
/* Issue one inform message describing each target of an access REF.
   MODE indicates the kind of access (read, write, or both) and selects
   between the "destination object" and "source object" wording.  */

void
access_ref::inform_access (access_mode mode) const
{
  const access_ref &aref = *this;
  if (!aref.ref)
    return;

  if (aref.phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
	 with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref))
	return;

      /* Except for MAXREF, the rest of the arguments' offsets need not
	 reflect one added to the PHI itself.  Determine the latter from
	 MAXREF on which the result is based.  */
      const offset_int orng[] =
	{
	  offrng[0] - maxref.offrng[0],
	  wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
	};

      /* Add the final PHI's offset to that of each of the arguments
	 and recurse to issue an inform message for it.  */
      for (unsigned i = 0; i != all_refs.length (); ++i)
	{
	  /* Skip any PHIs; those could lead to infinite recursion.  */
	  if (all_refs[i].phi ())
	    continue;

	  all_refs[i].add_offset (orng[0], orng[1]);
	  all_refs[i].inform_access (mode);
	}
      return;
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    /* Saturate offsets that don't fit in HOST_WIDE_INT to the nearest
       representable ptrdiff_t bound.  */
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  /* Format the offset only when it's nonzero or the size range doesn't
     already cover it; an empty string suppresses the "at offset" text.  */
  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (is_gimple_call (stmt))
	{
	  loc = gimple_location (stmt);
	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	    {
	      /* Strip the SSA_NAME suffix from the variable name and
		 recreate an identifier with the VLA's original name.  */
	      ref = gimple_call_lhs (stmt);
	      ref = SSA_NAME_IDENTIFIER (ref);
	      const char *id = IDENTIFIER_POINTER (ref);
	      size_t len = strcspn (id, ".$");
	      if (!len)
		len = strlen (id);
	      ref = get_identifier_with_length (id, len);
	    }
	  else
	    {
	      /* Except for VLAs, retrieve the allocation function.  */
	      allocfn = gimple_call_fndecl (stmt);
	      if (!allocfn)
		allocfn = gimple_call_fn (stmt);
	      if (TREE_CODE (allocfn) == SSA_NAME)
		{
		  /* For an ALLOC_CALL via a function pointer make a small
		     effort to determine the destination of the pointer.  */
		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
		  if (gimple_assign_single_p (def))
		    {
		      tree rhs = gimple_assign_rhs1 (def);
		      if (DECL_P (rhs))
			allocfn = rhs;
		      else if (TREE_CODE (rhs) == COMPONENT_REF)
			allocfn = TREE_OPERAND (rhs, 1);
		    }
		}
	    }
	}
      else if (gimple_nop_p (stmt))
	/* Handle DECL_PARM below.  */
	ref = SSA_NAME_VAR (ref);
    }

  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
	   && TREE_CODE (ref) != SSA_NAME)
    /* Nothing sensible to print for other kinds of references.  */
    return;

  if (mode == access_read_write || mode == access_write_only)
    {
      /* Write (or read-write) access: describe the destination.  */
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into destination object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "destination object %qE of size %s", ref, sizestr);
	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into destination object of size %s "
		"allocated by %qE", offstr, sizestr, allocfn);
      else
	inform (loc, "destination object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  /* Read-only access: describe the source.  */
  if (DECL_P (ref))
    {
      if (*offstr)
	inform (loc, "at offset %s into source object %qD of size %s",
		offstr, ref, sizestr);
      else
	inform (loc, "source object %qD of size %s", ref, sizestr);

      return;
    }

  /* NOTE(review): ALLOCFN may still be null here (e.g., for the VLA
     identifier path above); %qE with a null tree looks unsafe — confirm
     all callers reach this branch only for allocation calls.  */
  if (*offstr)
    inform (loc,
	    "at offset %s into source object of size %s allocated by %qE",
	    offstr, sizestr, allocfn);
  else
    inform (loc, "source object of size %s allocated by %qE",
	    sizestr, allocfn);
}
4578
d14c547a
MS
4579/* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4580 by BNDRNG if nonnull and valid. */
4581
4582static void
4583get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4584{
4585 if (bound)
4586 get_size_range (bound, range);
4587
4588 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4589 return;
4590
4591 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4592 {
4593 offset_int r[] =
4594 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4595 if (r[0] < bndrng[0])
4596 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4597 if (bndrng[1] < r[1])
4598 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4599 }
4600 else
4601 {
4602 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4603 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4604 }
4605}
4606
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the
   number of bytes to write.  If NULL, it's taken to be the same as
   DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object.

   MODE indicates whether the region is accessed for reading, writing,
   or both.

   When nonnull, PAD points to a more detailed description of the access.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

bool
check_access (tree exp, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize,
	      access_mode mode, const access_data *pad /* = NULL */)
{
  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either an approximate/minimum length of the source string for
     string functions or the size of the source object for raw memory
     functions.  */
  tree slen = NULL_TREE;

  /* The range of the access in bytes; first set to the write access
     for functions that write and then read for those that also (or
     just) read.  */
  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  if (!check_nul_terminated_array (exp, srcstr, maxread))
	    return false;
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  c_strlen_data lendata = { };
	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
	  range[0] = lendata.minlen;
	  range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
	  if (range[0]
	      && TREE_CODE (range[0]) == INTEGER_CST
	      && TREE_CODE (range[1]) == INTEGER_CST
	      && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	/* The precomputed size of the source object.  */
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
     if valid.  */
  get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);

  tree func = get_callee_fndecl (exp);
  /* Read vs write access by built-ins can be determined from the const
     qualifiers on the pointer argument.  In the absence of attribute
     access, non-const qualified pointer arguments to user-defined
     functions are assumed to both read and write the objects.  */
  const bool builtin = func ? fndecl_built_in_p (func) : false;

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0]
      && TREE_CODE (range[0]) == INTEGER_CST
      && tree_int_cst_lt (maxobjsize, range[0]))
    {
      location_t loc = tree_inlined_location (exp);
      maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
			    NULL_TREE, pad);
      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && TREE_CODE (range[0]) == INTEGER_CST
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (dstwrite
		  && tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  /* Bail once a warning has already been issued for this call
	     or for the destination object.  */
	  if (TREE_NO_WARNING (exp)
	      || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
	    return false;

	  location_t loc = tree_inlined_location (exp);
	  bool warned = false;
	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warned = (func
			? warning_at (loc, OPT_Wstringop_overflow_,
				      "%K%qD writing %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, func, range[0], dstsize)
			: warning_at (loc, OPT_Wstringop_overflow_,
				      "%Kwriting %E or more bytes into "
				      "a region of size %E overflows "
				      "the destination",
				      exp, range[0], dstsize));
	    }
	  else
	    {
	      const bool read
		= mode == access_read_only || mode == access_read_write;
	      const bool write
		= mode == access_write_only || mode == access_read_write;
	      const bool maybe = pad && pad->dst.parmarray;
	      warned = warn_for_access (loc, func, exp,
					OPT_Wstringop_overflow_,
					range, dstsize,
					write, read && !builtin, maybe);
	    }

	  if (warned)
	    {
	      TREE_NO_WARNING (exp) = true;
	      if (pad)
		pad->dst.inform_access (pad->mode);
	    }

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);

      location_t loc = tree_inlined_location (exp);
      tree size = dstsize;
      if (pad && pad->mode == access_read_only)
	size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);

      if (range[0] && maxread && tree_fits_uhwi_p (size))
	{
	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				    range, size, pad);
	      return false;
	    }

	  if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
	    {
	      /* Choose the warning option based on whether the access
		 also writes: overflow for writes, overread otherwise.  */
	      int opt = (dstwrite || mode != access_read_only
			 ? OPT_Wstringop_overflow_
			 : OPT_Wstringop_overread);
	      maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
	      return false;
	    }
	}

      maybe_warn_nonstring_arg (func, exp);
    }

  /* Check for reading past the end of SRC.  */
  bool overread = (slen
		   && slen == srcstr
		   && dstwrite
		   && range[0]
		   && TREE_CODE (slen) == INTEGER_CST
		   && tree_int_cst_lt (slen, range[0]));
  /* If none is determined try to get a better answer based on the details
     in PAD.  */
  if (!overread
      && pad
      && pad->src.sizrng[1] >= 0
      && pad->src.offrng[0] >= 0
      && (pad->src.offrng[1] < 0
	  || pad->src.offrng[0] <= pad->src.offrng[1]))
    {
      /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
	 PAD is nonnull and BNDRNG is valid.  */
      get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
      /* Set OVERREAD for reads starting just past the end of an object.  */
      overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
      range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
      slen = size_zero_node;
    }

  if (overread)
    {
      /* As above, bail if a warning was already issued for this call
	 or the source object.  */
      if (TREE_NO_WARNING (exp)
	  || (srcstr && TREE_NO_WARNING (srcstr))
	  || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
	return false;

      location_t loc = tree_inlined_location (exp);
      const bool read
	= mode == access_read_only || mode == access_read_write;
      const bool maybe = pad && pad->dst.parmarray;
      if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
			   slen, false, read, maybe))
	{
	  TREE_NO_WARNING (exp) = true;
	  if (pad)
	    pad->src.inform_access (access_read_only);
	}
      return false;
    }

  return true;
}
4895
d14c547a
MS
4896/* A convenience wrapper for check_access above to check access
4897 by a read-only function like puts. */
4898
4899static bool
4900check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4901 int ost /* = 1 */)
4902{
4903 if (!warn_stringop_overread)
4904 return true;
4905
4906 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4907 compute_objsize (src, ost, &data.src);
4908 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4909 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4910 &data);
4911}
4912
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise calls
   get_range_info to get it.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query * /* = NULL */)
{
  if (!stmt)
    return NULL_TREE;

  /* Look up the called function's type, falling back on the call's
     static type when the decl isn't known.  */
  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* Indices of the size arguments: ARGIDX1 is the (zero-based) size
     argument, ARGIDX2 the optional count argument (as in calloc).  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      /* Without attribute alloc_size only __builtin_alloca_with_align
	 is recognized; its size is the first argument.  */
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Extract the one or two argument positions from the attribute
	 (which stores them 1-based).  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With no count argument and a constant size, the answer is exact.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, fail (return null) if no valid range can be
       determined for the count.  */
    if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      /* Clamp the upper bound to SIZE_MAX.  */
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
5016
/* For an access to an object referenced to by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object obtained
   from the attribute access specification for the current function.
   Set STATIC_ARRAY if the array parameter has been declared [static].
   Return the function parameter on success and null otherwise.  */

tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
5072
83685efd
MS
5073/* Wrapper around the wide_int overload of get_range that accepts
5074 offset_int instead. For middle end expressions returns the same
5075 result. For a subset of nonconstamt expressions emitted by the front
5076 end determines a more precise range than would be possible otherwise. */
ef29b12c 5077
a2c2cee9 5078static bool
83685efd 5079get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
ef29b12c 5080{
83685efd
MS
5081 offset_int add = 0;
5082 if (TREE_CODE (x) == PLUS_EXPR)
de05c19d 5083 {
83685efd
MS
5084 /* Handle constant offsets in pointer addition expressions seen
5085 n the front end IL. */
5086 tree op = TREE_OPERAND (x, 1);
5087 if (TREE_CODE (op) == INTEGER_CST)
de05c19d 5088 {
83685efd
MS
5089 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5090 add = wi::to_offset (op);
5091 x = TREE_OPERAND (x, 0);
de05c19d 5092 }
83685efd
MS
5093 }
5094
5095 if (TREE_CODE (x) == NOP_EXPR)
5096 /* Also handle conversions to sizetype seen in the front end IL. */
5097 x = TREE_OPERAND (x, 0);
de05c19d 5098
83685efd 5099 tree type = TREE_TYPE (x);
fdd8560c
MS
5100 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5101 return false;
83685efd
MS
5102
5103 if (TREE_CODE (x) != INTEGER_CST
5104 && TREE_CODE (x) != SSA_NAME)
5105 {
5106 if (TYPE_UNSIGNED (type)
5107 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5108 type = signed_type_for (type);
5109
5110 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5111 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
de05c19d
MS
5112 return x;
5113 }
5114
a2c2cee9 5115 wide_int wr[2];
f5299992 5116 if (!get_range (x, stmt, wr, rvals))
a2c2cee9 5117 return false;
ef29b12c 5118
83685efd 5119 signop sgn = SIGNED;
de05c19d
MS
5120 /* Only convert signed integers or unsigned sizetype to a signed
5121 offset and avoid converting large positive values in narrower
5122 types to negative offsets. */
5123 if (TYPE_UNSIGNED (type)
5124 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5125 sgn = UNSIGNED;
5126
a2c2cee9
MS
5127 r[0] = offset_int::from (wr[0], sgn);
5128 r[1] = offset_int::from (wr[1], sgn);
5129 return true;
ef29b12c
MS
5130}
5131
83685efd
MS
5132/* Return the argument that the call STMT to a built-in function returns
5133 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5134 from the argument reflected in the value returned by the built-in if it
5135 can be determined, otherwise to 0 and HWI_M1U respectively. */
5136
5137static tree
5138gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5139 range_query *rvals)
5140{
5141 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5142 || gimple_call_num_args (stmt) < 1)
5143 return NULL_TREE;
5144
5145 tree fn = gimple_call_fndecl (stmt);
5146 switch (DECL_FUNCTION_CODE (fn))
5147 {
5148 case BUILT_IN_MEMCPY:
5149 case BUILT_IN_MEMCPY_CHK:
5150 case BUILT_IN_MEMMOVE:
5151 case BUILT_IN_MEMMOVE_CHK:
5152 case BUILT_IN_MEMSET:
5153 case BUILT_IN_STPCPY:
5154 case BUILT_IN_STPCPY_CHK:
5155 case BUILT_IN_STPNCPY:
5156 case BUILT_IN_STPNCPY_CHK:
5157 case BUILT_IN_STRCAT:
5158 case BUILT_IN_STRCAT_CHK:
5159 case BUILT_IN_STRCPY:
5160 case BUILT_IN_STRCPY_CHK:
5161 case BUILT_IN_STRNCAT:
5162 case BUILT_IN_STRNCAT_CHK:
5163 case BUILT_IN_STRNCPY:
5164 case BUILT_IN_STRNCPY_CHK:
5165 offrng[0] = offrng[1] = 0;
5166 return gimple_call_arg (stmt, 0);
5167
5168 case BUILT_IN_MEMPCPY:
5169 case BUILT_IN_MEMPCPY_CHK:
5170 {
5171 tree off = gimple_call_arg (stmt, 2);
5172 if (!get_offset_range (off, stmt, offrng, rvals))
5173 {
5174 offrng[0] = 0;
5175 offrng[1] = HOST_WIDE_INT_M1U;
5176 }
5177 return gimple_call_arg (stmt, 0);
5178 }
5179
5180 case BUILT_IN_MEMCHR:
5181 {
5182 tree off = gimple_call_arg (stmt, 2);
5183 if (get_offset_range (off, stmt, offrng, rvals))
5184 offrng[0] = 0;
5185 else
5186 {
5187 offrng[0] = 0;
5188 offrng[1] = HOST_WIDE_INT_M1U;
5189 }
5190 return gimple_call_arg (stmt, 0);
5191 }
5192
5193 case BUILT_IN_STRCHR:
5194 case BUILT_IN_STRRCHR:
5195 case BUILT_IN_STRSTR:
5196 {
5197 offrng[0] = 0;
5198 offrng[1] = HOST_WIDE_INT_M1U;
5199 }
5200 return gimple_call_arg (stmt, 0);
5201
5202 default:
5203 break;
5204 }
5205
5206 return NULL_TREE;
5207}
5208
eafe8ee7
MS
/* A helper of compute_objsize() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   updates *PREF and returns true; returns false when the identity of
   neither operand can be determined.  */

static bool
handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  tree_code code = gimple_assign_rhs_code (stmt);

  tree ptr = gimple_assign_rhs1 (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
    {
      /* First operand undetermined: treat it as an unknown-base object
	 of maximum size so the second operand can still constrain it.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  ptr = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
    {
      /* Same fallback for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  /* I0 indexes the operand whose result is kept below; it is flipped
     to true when only AREF[0] is usable.  */
  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining, whichever
	     is appropriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];
	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      /* NOTE(review): the offsets copied here come from AREF[I0], the
	 operand other than the one *PREF was set to above — presumably
	 intentional tightening, but worth confirming.  */
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }
  return true;
}
5286
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, QRY->RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, int ostype, access_ref *pref,
		   ssa_name_limit_t &snlim, pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* Peel an outermost ADDR_EXPR, remembering it by decrementing the
     dereference counter.  */
  const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
  if (addr)
    {
      --pref->deref;
      ptr = TREE_OPERAND (ptr, 0);
    }

  if (DECL_P (ptr))
    {
      pref->ref = ptr;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Set the maximum size if the reference is to the pointer
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* Use the declared or initializer size when it's a constant.  */
      if (tree size = decl_init_size (ptr, false))
	if (TREE_CODE (size) == INTEGER_CST)
	  {
	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
	    return true;
	  }

      pref->set_max_size_range ();
      return true;
    }

  const tree_code code = TREE_CODE (ptr);
  range_query *const rvals = qry ? qry->rvals : NULL;

  if (code == BIT_FIELD_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* Operand 2 is the bit position; convert it to bytes.  */
      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
      pref->add_offset (off / BITS_PER_UNIT);
      return true;
    }

  if (code == COMPONENT_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
	/* In accesses through union types consider the entire unions
	   rather than just their members.  */
	ostype = 0;
      tree field = TREE_OPERAND (ptr, 1);

      if (ostype == 0)
	{
	  /* In OSTYPE zero (for raw memory functions like memcpy), use
	     the maximum size instead if the identity of the enclosing
	     object cannot be determined.  */
	  if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	    return false;

	  /* Otherwise, use the size of the enclosing object and add
	     the offset of the member to the offset computed so far.  */
	  tree offset = byte_position (field);
	  if (TREE_CODE (offset) == INTEGER_CST)
	    pref->add_offset (wi::to_offset (offset));
	  else
	    pref->add_max_offset ();

	  if (!pref->ref)
	    /* REF may have been already set to an SSA_NAME earlier
	       to provide better context for diagnostics.  In that case,
	       leave it unchanged.  */
	    pref->ref = ref;
	  return true;
	}

      pref->ref = field;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
	{
	  /* Set maximum size if the reference is to the pointer member
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* SAM is set for array members that might need special treatment.  */
      special_array_member sam;
      tree size = component_ref_size (ptr, &sam);
      if (sam == special_array_member::int_0)
	pref->sizrng[0] = pref->sizrng[1] = 0;
      else if (!pref->trail1special && sam == special_array_member::trail_1)
	pref->sizrng[0] = pref->sizrng[1] = 1;
      else if (size && TREE_CODE (size) == INTEGER_CST)
	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
      else
	{
	  /* When the size of the member is unknown it's either a flexible
	     array member or a trailing special array member (either zero
	     length or one-element).  Set the size to the maximum minus
	     the constant size of the type.  */
	  pref->sizrng[0] = 0;
	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
	    if (TREE_CODE (recsize) == INTEGER_CST)
	      pref->sizrng[1] -= wi::to_offset (recsize);
	}
      return true;
    }

  if (code == ARRAY_REF || code == MEM_REF)
    {
      ++pref->deref;

      tree ref = TREE_OPERAND (ptr, 0);
      tree reftype = TREE_TYPE (ref);
      if (!addr && code == ARRAY_REF
	  && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
	/* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
	   of known bound.  */
	return false;

      if (code == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
	{
	  /* Give up for MEM_REFs of vector types; those may be synthesized
	     from multiple assignments to consecutive data members.  See PR
	     93200.
	     FIXME: Deal with this more generally, e.g., by marking up such
	     MEM_REFs at the time they're created.  */
	  reftype = TREE_TYPE (reftype);
	  if (TREE_CODE (reftype) == VECTOR_TYPE)
	    return false;
	}

      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (!get_offset_range (off, NULL, orng, rvals))
	{
	  /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
	  orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  orng[0] = -orng[1] - 1;
	}

      if (TREE_CODE (ptr) == ARRAY_REF)
	{
	  /* Convert the array index range determined above to a byte
	     offset.  */
	  tree lowbnd = array_ref_low_bound (ptr);
	  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
	    {
	      /* Adjust the index by the low bound of the array domain
		 (normally zero but 1 in Fortran).  */
	      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
	      orng[0] -= lb;
	      orng[1] -= lb;
	    }

	  tree eltype = TREE_TYPE (ptr);
	  tree tpsize = TYPE_SIZE_UNIT (eltype);
	  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
	    {
	      /* Variable-size element: only the offset bound survives.  */
	      pref->add_max_offset ();
	      return true;
	    }

	  offset_int sz = wi::to_offset (tpsize);
	  orng[0] *= sz;
	  orng[1] *= sz;

	  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
	    {
	      /* Except for the permissive raw memory functions which use
		 the size of the whole object determined above, use the size
		 of the referenced array.  Because the overall offset is from
		 the beginning of the complete array object add this overall
		 offset to the size of array.  */
	      offset_int sizrng[2] =
		{
		  pref->offrng[0] + orng[0] + sz,
		  pref->offrng[1] + orng[1] + sz
		};
	      if (sizrng[1] < sizrng[0])
		std::swap (sizrng[0], sizrng[1]);
	      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
		pref->sizrng[0] = sizrng[0];
	      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
		pref->sizrng[1] = sizrng[1];
	    }
	}

      pref->add_offset (orng[0], orng[1]);
      return true;
    }

  if (code == TARGET_MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
      pref->ref = ptr;
      pref->add_max_offset ();
      return true;
    }

  if (code == INTEGER_CST)
    {
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Set size to
	 zero.  For null pointers, set size to the maximum for now
	 since those may be the result of jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;
      pref->ref = ptr;

      return true;
    }

  if (code == STRING_CST)
    {
      /* TREE_STRING_LENGTH includes the terminating NUL for string
	 literals.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;
    }

  if (code == POINTER_PLUS_EXPR)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* Clear DEREF since the offset is being applied to the target
	 of the dereference.  */
      pref->deref = 0;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (get_offset_range (off, NULL, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();
      return true;
    }

  if (code == VIEW_CONVERT_EXPR)
    {
      /* Look through the view-conversion.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, ostype, pref, snlim, qry);
    }

  if (code == SSA_NAME)
    {
      if (!snlim.next ())
	return false;

      /* Only process an SSA_NAME if the recursion limit has not yet
	 been reached.  */
      if (qry)
	{
	  /* NOTE(review): this condition holds whenever the incremented
	     DEPTH is nonzero, so MAX_DEPTH merely shadows DEPTH; if a
	     high-water mark was intended the test should arguably be
	     ++qry->depth > qry->max_depth -- confirm.  */
	  if (++qry->depth)
	    qry->max_depth = qry->depth;
	  if (const access_ref *cache_ref = qry->get_ref (ptr))
	    {
	      /* If the pointer is in the cache set *PREF to what it refers
		 to and return success.  */
	      *pref = *cache_ref;
	      return true;
	    }
	}

      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (is_gimple_call (stmt))
	{
	  /* If STMT is a call to an allocation function get the size
	     from its argument(s).  If successful, also set *PREF->REF
	     to PTR for the caller to include in diagnostics.  */
	  wide_int wr[2];
	  if (gimple_call_alloc_size (stmt, wr, rvals))
	    {
	      pref->ref = ptr;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      /* Constrain both bounds to a valid size.  */
	      offset_int maxsize = wi::to_offset (max_object_size ());
	      if (pref->sizrng[0] > maxsize)
		pref->sizrng[0] = maxsize;
	      if (pref->sizrng[1] > maxsize)
		pref->sizrng[1] = maxsize;
	    }
	  else
	    {
	      /* For functions known to return one of their pointer arguments
		 try to determine what the returned pointer points to, and on
		 success add OFFRNG which was set to the offset added by
		 the function (e.g., memchr) to the overall offset.  */
	      offset_int offrng[2];
	      if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
		{
		  if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
		    return false;

		  /* Cap OFFRNG[1] to at most the remaining size of
		     the object.  */
		  offset_int remrng[2];
		  remrng[1] = pref->size_remaining (remrng);
		  if (remrng[1] < offrng[1])
		    offrng[1] = remrng[1];
		  pref->add_offset (offrng[0], offrng[1]);
		}
	      else
		{
		  /* For other calls that might return arbitrary pointers
		     including into the middle of objects set the size
		     range to maximum, clear PREF->BASE0, and also set
		     PREF->REF to include in diagnostics.  */
		  pref->set_max_size_range ();
		  pref->base0 = false;
		  pref->ref = ptr;
		}
	    }
	  /* NOTE(review): QRY is dereferenced unconditionally here (and in
	     the put_ref calls below) although it was null-checked above;
	     all visible callers pass a non-null query -- confirm.  */
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_nop_p (stmt))
	{
	  /* For a function argument try to determine the byte size
	     of the array from the current function declaration
	     (e.g., attribute access or related).  */
	  wide_int wr[2];
	  bool static_array = false;
	  if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	    {
	      pref->parmarray = !static_array;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      pref->ref = ref;
	      qry->put_ref (ptr, *pref);
	      return true;
	    }

	  pref->set_max_size_range ();
	  pref->base0 = false;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Merge the ranges over all the PHI arguments (SNLIM guards
	     against revisiting the same operand).  */
	  pref->ref = ptr;
	  access_ref phi_ref = *pref;
	  if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	    return false;
	  *pref = phi_ref;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (!is_gimple_assign (stmt))
	{
	  /* Clear BASE0 since the assigned pointer might point into
	     the middle of the object, set the maximum size range and,
	     if the SSA_NAME refers to a function argument, set
	     PREF->REF to it.  */
	  pref->base0 = false;
	  pref->set_max_size_range ();
	  pref->ref = ptr;
	  return true;
	}

      /* Note: deliberately shadows the outer CODE with the RHS code.  */
      tree_code code = gimple_assign_rhs_code (stmt);

      if (code == MAX_EXPR || code == MIN_EXPR)
	{
	  if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
	    return false;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      tree rhs = gimple_assign_rhs1 (stmt);

      if (code == POINTER_PLUS_EXPR
	  && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
	{
	  /* Compute the size of the object first. */
	  if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
	    return false;

	  offset_int orng[2];
	  tree off = gimple_assign_rhs2 (stmt);
	  if (get_offset_range (off, stmt, orng, rvals))
	    pref->add_offset (orng[0], orng[1]);
	  else
	    pref->add_max_offset ();
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (code == ADDR_EXPR
	  || code == SSA_NAME)
	return compute_objsize_r (rhs, ostype, pref, snlim, qry);

      /* (This could also be an assignment from a nonlocal pointer.)  Save
	 PTR to mention in diagnostics but otherwise treat it as a pointer
	 to an unknown object.  */
      pref->ref = rhs;
      pref->base0 = false;
      pref->set_max_size_range ();
      return true;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
5738
f36a8168
MS
5739/* A "public" wrapper around the above. Clients should use this overload
5740 instead. */
a2c2cee9 5741
baad4c48 5742tree
a2c2cee9 5743compute_objsize (tree ptr, int ostype, access_ref *pref,
f5299992 5744 range_query *rvals /* = NULL */)
a2c2cee9 5745{
d02c41dd
MS
5746 pointer_query qry;
5747 qry.rvals = rvals;
5748 ssa_name_limit_t snlim;
5749 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5750 return NULL_TREE;
5751
5752 offset_int maxsize = pref->size_remaining ();
5753 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5754 pref->offrng[0] = 0;
5755 return wide_int_to_tree (sizetype, maxsize);
5756}
5757
5758/* Transitional wrapper. The function should be removed once callers
5759 transition to the pointer_query API. */
5760
5761tree
5762compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5763{
5764 pointer_query qry;
5765 if (ptr_qry)
5766 ptr_qry->depth = 0;
5767 else
5768 ptr_qry = &qry;
5769
eafe8ee7 5770 ssa_name_limit_t snlim;
d02c41dd 5771 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
a2c2cee9
MS
5772 return NULL_TREE;
5773
83685efd
MS
5774 offset_int maxsize = pref->size_remaining ();
5775 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5776 pref->offrng[0] = 0;
5777 return wide_int_to_tree (sizetype, maxsize);
a2c2cee9
MS
5778}
5779
d02c41dd 5780/* Legacy wrapper around the above. The function should be removed
a2c2cee9
MS
5781 once callers transition to one of the two above. */
5782
5783tree
5784compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
83685efd 5785 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
a2c2cee9
MS
5786{
5787 /* Set the initial offsets to zero and size to negative to indicate
5788 none has been computed yet. */
5789 access_ref ref;
5790 tree size = compute_objsize (ptr, ostype, &ref, rvals);
83685efd 5791 if (!size || !ref.base0)
a2c2cee9
MS
5792 return NULL_TREE;
5793
5794 if (pdecl)
5795 *pdecl = ref.ref;
5796
5797 if (poff)
5798 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5799
5800 return size;
ee92e7ba
MS
5801}
5802
5803/* Helper to determine and check the sizes of the source and the destination
d9c5a8b9
MS
5804 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
5805 call expression, DEST is the destination argument, SRC is the source
5806 argument or null, and LEN is the number of bytes. Use Object Size type-0
5807 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
ee92e7ba
MS
5808 (no overflow or invalid sizes), false otherwise. */
5809
5810static bool
cc8bea0a 5811check_memop_access (tree exp, tree dest, tree src, tree size)
ee92e7ba 5812{
ee92e7ba 5813 /* For functions like memset and memcpy that operate on raw memory
d9c5a8b9
MS
5814 try to determine the size of the largest source and destination
5815 object using type-0 Object Size regardless of the object size
5816 type specified by the option. */
d14c547a 5817 access_data data (exp, access_read_write);
a2c2cee9
MS
5818 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5819 tree dstsize = compute_objsize (dest, 0, &data.dst);
ee92e7ba 5820
d14c547a
MS
5821 return check_access (exp, size, /*maxread=*/NULL_TREE,
5822 srcsize, dstsize, data.mode, &data);
d9c5a8b9
MS
5823}
5824
5825/* Validate memchr arguments without performing any expansion.
5826 Return NULL_RTX. */
5827
5828static rtx
5829expand_builtin_memchr (tree exp, rtx)
5830{
5831 if (!validate_arglist (exp,
5832 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5833 return NULL_RTX;
5834
5835 tree arg1 = CALL_EXPR_ARG (exp, 0);
5836 tree len = CALL_EXPR_ARG (exp, 2);
5837
d14c547a 5838 check_read_access (exp, arg1, len, 0);
d9c5a8b9
MS
5839
5840 return NULL_RTX;
ee92e7ba
MS
5841}
5842
5039610b
SL
5843/* Expand a call EXP to the memcpy builtin.
5844 Return NULL_RTX if we failed, the caller should emit a normal call,
9cb65f92 5845 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 5846 mode MODE if that's convenient). */
5039610b 5847
28f4ec01 5848static rtx
44e10129 5849expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 5850{
5039610b
SL
5851 if (!validate_arglist (exp,
5852 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5853 return NULL_RTX;
ee92e7ba
MS
5854
5855 tree dest = CALL_EXPR_ARG (exp, 0);
5856 tree src = CALL_EXPR_ARG (exp, 1);
5857 tree len = CALL_EXPR_ARG (exp, 2);
5858
cc8bea0a 5859 check_memop_access (exp, dest, src, len);
ee92e7ba 5860
671a00ee 5861 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
03a9b90a 5862 /*retmode=*/ RETURN_BEGIN, false);
edcf72f3 5863}
57814e5e 5864
e50d56a5
MS
5865/* Check a call EXP to the memmove built-in for validity.
5866 Return NULL_RTX on both success and failure. */
5867
5868static rtx
03a9b90a 5869expand_builtin_memmove (tree exp, rtx target)
e50d56a5
MS
5870{
5871 if (!validate_arglist (exp,
5872 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5873 return NULL_RTX;
5874
5875 tree dest = CALL_EXPR_ARG (exp, 0);
d9c5a8b9 5876 tree src = CALL_EXPR_ARG (exp, 1);
e50d56a5
MS
5877 tree len = CALL_EXPR_ARG (exp, 2);
5878
cc8bea0a 5879 check_memop_access (exp, dest, src, len);
e50d56a5 5880
03a9b90a
AS
5881 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5882 /*retmode=*/ RETURN_BEGIN, true);
e50d56a5
MS
5883}
5884
5039610b
SL
5885/* Expand a call EXP to the mempcpy builtin.
5886 Return NULL_RTX if we failed; the caller should emit a normal call,
e3e9f108 5887 otherwise try to get the result in TARGET, if convenient (and in
2ff5ffb6 5888 mode MODE if that's convenient). */
e3e9f108
JJ
5889
5890static rtx
671a00ee 5891expand_builtin_mempcpy (tree exp, rtx target)
e3e9f108 5892{
5039610b
SL
5893 if (!validate_arglist (exp,
5894 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5895 return NULL_RTX;
ee92e7ba
MS
5896
5897 tree dest = CALL_EXPR_ARG (exp, 0);
5898 tree src = CALL_EXPR_ARG (exp, 1);
5899 tree len = CALL_EXPR_ARG (exp, 2);
5900
af3fa359
MS
5901 /* Policy does not generally allow using compute_objsize (which
5902 is used internally by check_memop_size) to change code generation
5903 or drive optimization decisions.
5904
5905 In this instance it is safe because the code we generate has
5906 the same semantics regardless of the return value of
5907 check_memop_sizes. Exactly the same amount of data is copied
5908 and the return value is exactly the same in both cases.
5909
5910 Furthermore, check_memop_size always uses mode 0 for the call to
5911 compute_objsize, so the imprecise nature of compute_objsize is
5912 avoided. */
5913
ee92e7ba
MS
5914 /* Avoid expanding mempcpy into memcpy when the call is determined
5915 to overflow the buffer. This also prevents the same overflow
5916 from being diagnosed again when expanding memcpy. */
cc8bea0a 5917 if (!check_memop_access (exp, dest, src, len))
ee92e7ba
MS
5918 return NULL_RTX;
5919
5920 return expand_builtin_mempcpy_args (dest, src, len,
2ff5ffb6 5921 target, exp, /*retmode=*/ RETURN_END);
edcf72f3
IE
5922}
5923
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  MIGHT_OVERLAP is true when the
   source and destination may overlap (memmove).  Returns NULL_RTX when
   the copy could not be expanded inline.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Refine alignment/size expectations from block profile feedback
     when expanding a gimple statement.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  /* Prefer a mempcpy libcall over memcpy+add when the libc mempcpy is
     fast, the caller wants the end pointer, and the regions can't
     overlap.  */
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* NOTE(review): a pc_rtx result presumably indicates the move was
     emitted as a tail call, leaving nothing to return -- confirm
     against emit_block_move_hints.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  /* For mempcpy/stpcpy-style returns, adjust the returned address
     past the copied bytes.  */
  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
6039
/* Like expand_builtin_mempcpy but with already-extracted arguments:
   expand a copy of LEN bytes from SRC to DEST, assigning the result to
   TARGET if convenient, with the return value determined by RETMODE.
   ORIG_EXP is the original call expression.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, tree orig_exp, memop_ret retmode)
{
  /* mempcpy operands are not allowed to overlap, hence the final
     false (might_overlap) argument.  */
  return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
					  retmode, false);
}
6047
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Give up when the target doesn't provide a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* The caller wants the destination pointer back: pin it in a
	 register before the copy.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  /* Operand 0 receives the end pointer (only wanted for non-BEGIN
     return modes); operands 1 and 2 are the fixed memory operands.  */
  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
6093
ee92e7ba
MS
6094/* Do some very basic size validation of a call to the strcpy builtin
6095 given by EXP. Return NULL_RTX to have the built-in expand to a call
6096 to the library function. */
6097
6098static rtx
b5338fb3 6099expand_builtin_strcat (tree exp)
ee92e7ba
MS
6100{
6101 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6102 || !warn_stringop_overflow)
6103 return NULL_RTX;
6104
6105 tree dest = CALL_EXPR_ARG (exp, 0);
6106 tree src = CALL_EXPR_ARG (exp, 1);
6107
6108 /* There is no way here to determine the length of the string in
6109 the destination to which the SRC string is being appended so
6110 just diagnose cases when the souce string is longer than
6111 the destination object. */
d14c547a
MS
6112 access_data data (exp, access_read_write, NULL_TREE, true,
6113 NULL_TREE, true);
6114 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6115 compute_objsize (src, ost, &data.src);
6116 tree destsize = compute_objsize (dest, ost, &data.dst);
ee92e7ba 6117
d14c547a
MS
6118 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6119 src, destsize, data.mode, &data);
ee92e7ba
MS
6120
6121 return NULL_RTX;
6122}
6123
b8698a0f
L
6124/* Expand expression EXP, which is a call to the strcpy builtin. Return
6125 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 6126 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 6127 convenient). */
fed3cef0 6128
28f4ec01 6129static rtx
44e10129 6130expand_builtin_strcpy (tree exp, rtx target)
28f4ec01 6131{
ee92e7ba
MS
6132 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6133 return NULL_RTX;
6134
6135 tree dest = CALL_EXPR_ARG (exp, 0);
6136 tree src = CALL_EXPR_ARG (exp, 1);
6137
6138 if (warn_stringop_overflow)
6139 {
d14c547a
MS
6140 access_data data (exp, access_read_write, NULL_TREE, true,
6141 NULL_TREE, true);
6142 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6143 compute_objsize (src, ost, &data.src);
6144 tree dstsize = compute_objsize (dest, ost, &data.dst);
6145 check_access (exp, /*dstwrite=*/ NULL_TREE,
6146 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6147 dstsize, data.mode, &data);
ee92e7ba
MS
6148 }
6149
e08341bb 6150 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
36537a1c
MS
6151 {
6152 /* Check to see if the argument was declared attribute nonstring
6153 and if so, issue a warning since at this point it's not known
6154 to be nul-terminated. */
6155 tree fndecl = get_callee_fndecl (exp);
6156 maybe_warn_nonstring_arg (fndecl, exp);
6157 return ret;
6158 }
6159
6160 return NULL_RTX;
5039610b
SL
6161}
6162
6163/* Helper function to do the actual work for expand_builtin_strcpy. The
6164 arguments to the builtin_strcpy call DEST and SRC are broken out
6165 so that this can also be called without constructing an actual CALL_EXPR.
6166 The other arguments and return value are the same as for
6167 expand_builtin_strcpy. */
6168
6169static rtx
e08341bb 6170expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
5039610b 6171{
e08341bb 6172 /* Detect strcpy calls with unterminated arrays.. */
d14c547a
MS
6173 tree size;
6174 bool exact;
6175 if (tree nonstr = unterminated_array (src, &size, &exact))
e08341bb
MS
6176 {
6177 /* NONSTR refers to the non-nul terminated constant array. */
d14c547a
MS
6178 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6179 size, exact);
e08341bb
MS
6180 return NULL_RTX;
6181 }
6182
2ff5ffb6 6183 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
28f4ec01
BS
6184}
6185
5039610b
SL
6186/* Expand a call EXP to the stpcpy builtin.
6187 Return NULL_RTX if we failed the caller should emit a normal call,
9cb65f92
KG
6188 otherwise try to get the result in TARGET, if convenient (and in
6189 mode MODE if that's convenient). */
6190
6191static rtx
3ce4cdb2 6192expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
9cb65f92 6193{
5039610b 6194 tree dst, src;
db3927fb 6195 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
6196
6197 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6198 return NULL_RTX;
6199
6200 dst = CALL_EXPR_ARG (exp, 0);
6201 src = CALL_EXPR_ARG (exp, 1);
6202
e50d56a5
MS
6203 if (warn_stringop_overflow)
6204 {
d14c547a 6205 access_data data (exp, access_read_write);
a2c2cee9
MS
6206 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6207 &data.dst);
d14c547a
MS
6208 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6209 src, destsize, data.mode, &data);
e50d56a5
MS
6210 }
6211
beed8fc0 6212 /* If return value is ignored, transform stpcpy into strcpy. */
e79983f4 6213 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
ad4319ec 6214 {
e79983f4 6215 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
aa493694 6216 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
0d2a6e08 6217 return expand_expr (result, target, mode, EXPAND_NORMAL);
ad4319ec 6218 }
9cb65f92
KG
6219 else
6220 {
5039610b 6221 tree len, lenp1;
beed8fc0 6222 rtx ret;
e3e9f108 6223
8fd3cf4e 6224 /* Ensure we get an actual string whose length can be evaluated at
c22cacf3
MS
6225 compile-time, not an expression containing a string. This is
6226 because the latter will potentially produce pessimized code
6227 when used to produce the return value. */
e09aa5bd 6228 c_strlen_data lendata = { };
866626ef 6229 if (!c_getstr (src)
e09aa5bd 6230 || !(len = c_strlen (src, 0, &lendata, 1)))
2ff5ffb6
ML
6231 return expand_movstr (dst, src, target,
6232 /*retmode=*/ RETURN_END_MINUS_ONE);
9cb65f92 6233
d14c547a
MS
6234 if (lendata.decl)
6235 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
01b0acb7 6236
db3927fb 6237 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
44e10129 6238 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
2ff5ffb6
ML
6239 target, exp,
6240 /*retmode=*/ RETURN_END_MINUS_ONE);
beed8fc0
AO
6241
6242 if (ret)
6243 return ret;
6244
6245 if (TREE_CODE (len) == INTEGER_CST)
6246 {
84217346 6247 rtx len_rtx = expand_normal (len);
beed8fc0 6248
481683e1 6249 if (CONST_INT_P (len_rtx))
beed8fc0 6250 {
e08341bb 6251 ret = expand_builtin_strcpy_args (exp, dst, src, target);
beed8fc0
AO
6252
6253 if (ret)
6254 {
6255 if (! target)
58ec6ece
SE
6256 {
6257 if (mode != VOIDmode)
6258 target = gen_reg_rtx (mode);
6259 else
6260 target = gen_reg_rtx (GET_MODE (ret));
6261 }
beed8fc0
AO
6262 if (GET_MODE (target) != GET_MODE (ret))
6263 ret = gen_lowpart (GET_MODE (target), ret);
6264
0a81f074 6265 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
7ce3fc8f 6266 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
298e6adc 6267 gcc_assert (ret);
beed8fc0
AO
6268
6269 return target;
6270 }
6271 }
6272 }
6273
2ff5ffb6
ML
6274 return expand_movstr (dst, src, target,
6275 /*retmode=*/ RETURN_END_MINUS_ONE);
9cb65f92
KG
6276 }
6277}
6278
3ce4cdb2
MS
6279/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6280 arguments while being careful to avoid duplicate warnings (which could
6281 be issued if the expander were to expand the call, resulting in it
6282 being emitted in expand_call(). */
6283
6284static rtx
6285expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6286{
6287 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6288 {
6289 /* The call has been successfully expanded. Check for nonstring
6290 arguments and issue warnings as appropriate. */
6291 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6292 return ret;
6293 }
6294
6295 return NULL_RTX;
6296}
6297
e50d56a5
MS
6298/* Check a call EXP to the stpncpy built-in for validity.
6299 Return NULL_RTX on both success and failure. */
6300
6301static rtx
6302expand_builtin_stpncpy (tree exp, rtx)
6303{
6304 if (!validate_arglist (exp,
6305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6306 || !warn_stringop_overflow)
6307 return NULL_RTX;
6308
c6c02519 6309 /* The source and destination of the call. */
e50d56a5
MS
6310 tree dest = CALL_EXPR_ARG (exp, 0);
6311 tree src = CALL_EXPR_ARG (exp, 1);
6312
c6c02519 6313 /* The exact number of bytes to write (not the maximum). */
e50d56a5 6314 tree len = CALL_EXPR_ARG (exp, 2);
d14c547a 6315 access_data data (exp, access_read_write);
c6c02519 6316 /* The size of the destination object. */
a2c2cee9 6317 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
d14c547a 6318 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
e50d56a5
MS
6319 return NULL_RTX;
6320}
6321
57814e5e
JJ
6322/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6323 bytes from constant string DATA + OFFSET and return it as target
6324 constant. */
6325
14a43348 6326rtx
4682ae04 6327builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
095a2d76 6328 scalar_int_mode mode)
57814e5e
JJ
6329{
6330 const char *str = (const char *) data;
6331
6332 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6333 return const0_rtx;
6334
6335 return c_readstr (str + offset, mode);
6336}
6337
ee92e7ba
MS
6338/* Helper to check the sizes of sequences and the destination of calls
6339 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6340 success (no overflow or invalid sizes), false otherwise. */
6341
6342static bool
6343check_strncat_sizes (tree exp, tree objsize)
6344{
6345 tree dest = CALL_EXPR_ARG (exp, 0);
6346 tree src = CALL_EXPR_ARG (exp, 1);
cc8bea0a 6347 tree maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
6348
6349 /* Try to determine the range of lengths that the source expression
6350 refers to. */
5d6655eb
MS
6351 c_strlen_data lendata = { };
6352 get_range_strlen (src, &lendata, /* eltsize = */ 1);
ee92e7ba
MS
6353
6354 /* Try to verify that the destination is big enough for the shortest
6355 string. */
6356
d14c547a 6357 access_data data (exp, access_read_write, maxread, true);
ee92e7ba
MS
6358 if (!objsize && warn_stringop_overflow)
6359 {
6360 /* If it hasn't been provided by __strncat_chk, try to determine
6361 the size of the destination object into which the source is
6362 being copied. */
a2c2cee9 6363 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
ee92e7ba
MS
6364 }
6365
6366 /* Add one for the terminating nul. */
5d6655eb
MS
6367 tree srclen = (lendata.minlen
6368 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
ee92e7ba
MS
6369 size_one_node)
6370 : NULL_TREE);
6371
cc8bea0a
MS
6372 /* The strncat function copies at most MAXREAD bytes and always appends
6373 the terminating nul so the specified upper bound should never be equal
6374 to (or greater than) the size of the destination. */
6375 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6376 && tree_int_cst_equal (objsize, maxread))
ee92e7ba 6377 {
fe7f75cf 6378 location_t loc = tree_inlined_location (exp);
e50d56a5 6379 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 6380 "%K%qD specified bound %E equals destination size",
cc8bea0a 6381 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
6382
6383 return false;
6384 }
6385
6386 if (!srclen
cc8bea0a 6387 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 6388 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
6389 && tree_int_cst_lt (maxread, srclen)))
6390 srclen = maxread;
ee92e7ba 6391
a2c2cee9 6392 /* The number of bytes to write is LEN but check_access will alsoa
ee92e7ba 6393 check SRCLEN if LEN's value isn't known. */
d14c547a
MS
6394 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6395 objsize, data.mode, &data);
ee92e7ba
MS
6396}
6397
6398/* Similar to expand_builtin_strcat, do some very basic size validation
6399 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
6400 the built-in expand to a call to the library function. */
6401
6402static rtx
6403expand_builtin_strncat (tree exp, rtx)
6404{
6405 if (!validate_arglist (exp,
6406 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6407 || !warn_stringop_overflow)
6408 return NULL_RTX;
6409
6410 tree dest = CALL_EXPR_ARG (exp, 0);
6411 tree src = CALL_EXPR_ARG (exp, 1);
6412 /* The upper bound on the number of bytes to write. */
cc8bea0a 6413 tree maxread = CALL_EXPR_ARG (exp, 2);
b5338fb3
MS
6414
6415 /* Detect unterminated source (only). */
6416 if (!check_nul_terminated_array (exp, src, maxread))
6417 return NULL_RTX;
6418
ee92e7ba
MS
6419 /* The length of the source sequence. */
6420 tree slen = c_strlen (src, 1);
6421
6422 /* Try to determine the range of lengths that the source expression
5d6655eb
MS
6423 refers to. Since the lengths are only used for warning and not
6424 for code generation disable strict mode below. */
6425 tree maxlen = slen;
6426 if (!maxlen)
6427 {
6428 c_strlen_data lendata = { };
6429 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6430 maxlen = lendata.maxbound;
6431 }
ee92e7ba 6432
d14c547a 6433 access_data data (exp, access_read_write);
ee92e7ba
MS
6434 /* Try to verify that the destination is big enough for the shortest
6435 string. First try to determine the size of the destination object
6436 into which the source is being copied. */
a2c2cee9 6437 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
ee92e7ba
MS
6438
6439 /* Add one for the terminating nul. */
5d6655eb
MS
6440 tree srclen = (maxlen
6441 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
ee92e7ba
MS
6442 size_one_node)
6443 : NULL_TREE);
6444
cc8bea0a
MS
6445 /* The strncat function copies at most MAXREAD bytes and always appends
6446 the terminating nul so the specified upper bound should never be equal
6447 to (or greater than) the size of the destination. */
6448 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6449 && tree_int_cst_equal (destsize, maxread))
ee92e7ba 6450 {
fe7f75cf 6451 location_t loc = tree_inlined_location (exp);
e50d56a5 6452 warning_at (loc, OPT_Wstringop_overflow_,
13c5654f 6453 "%K%qD specified bound %E equals destination size",
cc8bea0a 6454 exp, get_callee_fndecl (exp), maxread);
ee92e7ba
MS
6455
6456 return NULL_RTX;
6457 }
6458
6459 if (!srclen
cc8bea0a 6460 || (maxread && tree_fits_uhwi_p (maxread)
ee92e7ba 6461 && tree_fits_uhwi_p (srclen)
cc8bea0a
MS
6462 && tree_int_cst_lt (maxread, srclen)))
6463 srclen = maxread;
ee92e7ba 6464
d14c547a
MS
6465 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6466 destsize, data.mode, &data);
ee92e7ba
MS
6467 return NULL_RTX;
6468}
6469
b8698a0f 6470/* Expand expression EXP, which is a call to the strncpy builtin. Return
5039610b 6471 NULL_RTX if we failed the caller should emit a normal call. */
da9e9f08
KG
6472
6473static rtx
44e10129 6474expand_builtin_strncpy (tree exp, rtx target)
da9e9f08 6475{
db3927fb 6476 location_t loc = EXPR_LOCATION (exp);
5039610b 6477
b5338fb3
MS
6478 if (!validate_arglist (exp,
6479 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6480 return NULL_RTX;
6481 tree dest = CALL_EXPR_ARG (exp, 0);
6482 tree src = CALL_EXPR_ARG (exp, 1);
6483 /* The number of bytes to write (not the maximum). */
6484 tree len = CALL_EXPR_ARG (exp, 2);
57814e5e 6485
b5338fb3
MS
6486 /* The length of the source sequence. */
6487 tree slen = c_strlen (src, 1);
ee92e7ba 6488
b5338fb3
MS
6489 if (warn_stringop_overflow)
6490 {
d14c547a
MS
6491 access_data data (exp, access_read_write, len, true, len, true);
6492 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6493 compute_objsize (src, ost, &data.src);
6494 tree dstsize = compute_objsize (dest, ost, &data.dst);
b5338fb3
MS
6495 /* The number of bytes to write is LEN but check_access will also
6496 check SLEN if LEN's value isn't known. */
d14c547a
MS
6497 check_access (exp, /*dstwrite=*/len,
6498 /*maxread=*/len, src, dstsize, data.mode, &data);
b5338fb3 6499 }
da9e9f08 6500
b5338fb3
MS
6501 /* We must be passed a constant len and src parameter. */
6502 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6503 return NULL_RTX;
57814e5e 6504
b5338fb3
MS
6505 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6506
6507 /* We're required to pad with trailing zeros if the requested
6508 len is greater than strlen(s2)+1. In that case try to
6509 use store_by_pieces, if it fails, punt. */
6510 if (tree_int_cst_lt (slen, len))
6511 {
6512 unsigned int dest_align = get_pointer_alignment (dest);
6513 const char *p = c_getstr (src);
6514 rtx dest_mem;
6515
6516 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6517 || !can_store_by_pieces (tree_to_uhwi (len),
6518 builtin_strncpy_read_str,
6519 CONST_CAST (char *, p),
6520 dest_align, false))
6521 return NULL_RTX;
6522
6523 dest_mem = get_memory_rtx (dest, len);
6524 store_by_pieces (dest_mem, tree_to_uhwi (len),
6525 builtin_strncpy_read_str,
6526 CONST_CAST (char *, p), dest_align, false,
6527 RETURN_BEGIN);
6528 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6530 return dest_mem;
da9e9f08 6531 }
b5338fb3 6532
5039610b 6533 return NULL_RTX;
da9e9f08
KG
6534}
6535
ab937357
JJ
6536/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6537 bytes from constant string DATA + OFFSET and return it as target
6538 constant. */
6539
34d85166 6540rtx
4682ae04 6541builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 6542 scalar_int_mode mode)
ab937357
JJ
6543{
6544 const char *c = (const char *) data;
f883e0a7 6545 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
6546
6547 memset (p, *c, GET_MODE_SIZE (mode));
6548
6549 return c_readstr (p, mode);
6550}
6551
1a887f86
RS
6552/* Callback routine for store_by_pieces. Return the RTL of a register
6553 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6554 char value given in the RTL register data. For example, if mode is
6555 4 bytes wide, return the RTL for 0x01010101*data. */
6556
6557static rtx
4682ae04 6558builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
095a2d76 6559 scalar_int_mode mode)
1a887f86
RS
6560{
6561 rtx target, coeff;
6562 size_t size;
6563 char *p;
6564
6565 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
6566 if (size == 1)
6567 return (rtx) data;
1a887f86 6568
f883e0a7 6569 p = XALLOCAVEC (char, size);
1a887f86
RS
6570 memset (p, 1, size);
6571 coeff = c_readstr (p, mode);
6572
5ab2f7b7 6573 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
6574 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6575 return force_reg (mode, target);
6576}
6577
b8698a0f
L
6578/* Expand expression EXP, which is a call to the memset builtin. Return
6579 NULL_RTX if we failed the caller should emit a normal call, otherwise
5039610b 6580 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 6581 convenient). */
fed3cef0 6582
28f4ec01 6583static rtx
ef4bddc2 6584expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 6585{
5039610b
SL
6586 if (!validate_arglist (exp,
6587 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6588 return NULL_RTX;
ee92e7ba
MS
6589
6590 tree dest = CALL_EXPR_ARG (exp, 0);
6591 tree val = CALL_EXPR_ARG (exp, 1);
6592 tree len = CALL_EXPR_ARG (exp, 2);
6593
cc8bea0a 6594 check_memop_access (exp, dest, NULL_TREE, len);
ee92e7ba
MS
6595
6596 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
5039610b 6597}
28f4ec01 6598
5039610b
SL
6599/* Helper function to do the actual work for expand_builtin_memset. The
6600 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6601 so that this can also be called without constructing an actual CALL_EXPR.
6602 The other arguments and return value are the same as for
6603 expand_builtin_memset. */
880864cf 6604
5039610b
SL
6605static rtx
6606expand_builtin_memset_args (tree dest, tree val, tree len,
ef4bddc2 6607 rtx target, machine_mode mode, tree orig_exp)
5039610b
SL
6608{
6609 tree fndecl, fn;
6610 enum built_in_function fcode;
ef4bddc2 6611 machine_mode val_mode;
5039610b
SL
6612 char c;
6613 unsigned int dest_align;
6614 rtx dest_mem, dest_addr, len_rtx;
6615 HOST_WIDE_INT expected_size = -1;
6616 unsigned int expected_align = 0;
3918b108
JH
6617 unsigned HOST_WIDE_INT min_size;
6618 unsigned HOST_WIDE_INT max_size;
82bb7d4e 6619 unsigned HOST_WIDE_INT probable_max_size;
28f4ec01 6620
0eb77834 6621 dest_align = get_pointer_alignment (dest);
079a182e 6622
5039610b
SL
6623 /* If DEST is not a pointer type, don't do this operation in-line. */
6624 if (dest_align == 0)
6625 return NULL_RTX;
c2bd38e8 6626
a5883ba0
MM
6627 if (currently_expanding_gimple_stmt)
6628 stringop_block_profile (currently_expanding_gimple_stmt,
6629 &expected_align, &expected_size);
726a989a 6630
5039610b
SL
6631 if (expected_align < dest_align)
6632 expected_align = dest_align;
880864cf 6633
5039610b
SL
6634 /* If the LEN parameter is zero, return DEST. */
6635 if (integer_zerop (len))
6636 {
6637 /* Evaluate and ignore VAL in case it has side-effects. */
6638 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6639 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6640 }
57e84f18 6641
5039610b
SL
6642 /* Stabilize the arguments in case we fail. */
6643 dest = builtin_save_expr (dest);
6644 val = builtin_save_expr (val);
6645 len = builtin_save_expr (len);
1a887f86 6646
5039610b 6647 len_rtx = expand_normal (len);
82bb7d4e
JH
6648 determine_block_size (len, len_rtx, &min_size, &max_size,
6649 &probable_max_size);
5039610b 6650 dest_mem = get_memory_rtx (dest, len);
8a445129 6651 val_mode = TYPE_MODE (unsigned_char_type_node);
1a887f86 6652
5039610b
SL
6653 if (TREE_CODE (val) != INTEGER_CST)
6654 {
6655 rtx val_rtx;
1a887f86 6656
5039610b 6657 val_rtx = expand_normal (val);
8a445129 6658 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
28f4ec01 6659
5039610b
SL
6660 /* Assume that we can memset by pieces if we can store
6661 * the coefficients by pieces (in the required modes).
6662 * We can't pass builtin_memset_gen_str as that emits RTL. */
6663 c = 1;
cc269bb6 6664 if (tree_fits_uhwi_p (len)
ae7e9ddd 6665 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
6666 builtin_memset_read_str, &c, dest_align,
6667 true))
5039610b 6668 {
8a445129 6669 val_rtx = force_reg (val_mode, val_rtx);
ae7e9ddd 6670 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150 6671 builtin_memset_gen_str, val_rtx, dest_align,
2ff5ffb6 6672 true, RETURN_BEGIN);
5039610b
SL
6673 }
6674 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
6675 dest_align, expected_align,
82bb7d4e
JH
6676 expected_size, min_size, max_size,
6677 probable_max_size))
880864cf 6678 goto do_libcall;
b8698a0f 6679
5039610b
SL
6680 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6681 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6682 return dest_mem;
6683 }
28f4ec01 6684
5039610b
SL
6685 if (target_char_cast (val, &c))
6686 goto do_libcall;
ab937357 6687
5039610b
SL
6688 if (c)
6689 {
cc269bb6 6690 if (tree_fits_uhwi_p (len)
ae7e9ddd 6691 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
6692 builtin_memset_read_str, &c, dest_align,
6693 true))
ae7e9ddd 6694 store_by_pieces (dest_mem, tree_to_uhwi (len),
2ff5ffb6
ML
6695 builtin_memset_read_str, &c, dest_align, true,
6696 RETURN_BEGIN);
8a445129
RS
6697 else if (!set_storage_via_setmem (dest_mem, len_rtx,
6698 gen_int_mode (c, val_mode),
5039610b 6699 dest_align, expected_align,
82bb7d4e
JH
6700 expected_size, min_size, max_size,
6701 probable_max_size))
5039610b 6702 goto do_libcall;
b8698a0f 6703
5039610b
SL
6704 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6705 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6706 return dest_mem;
6707 }
ab937357 6708
5039610b
SL
6709 set_mem_align (dest_mem, dest_align);
6710 dest_addr = clear_storage_hints (dest_mem, len_rtx,
6711 CALL_EXPR_TAILCALL (orig_exp)
6712 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3918b108 6713 expected_align, expected_size,
82bb7d4e
JH
6714 min_size, max_size,
6715 probable_max_size);
28f4ec01 6716
5039610b
SL
6717 if (dest_addr == 0)
6718 {
6719 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6720 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6721 }
28f4ec01 6722
5039610b 6723 return dest_addr;
880864cf 6724
5039610b
SL
6725 do_libcall:
6726 fndecl = get_callee_fndecl (orig_exp);
6727 fcode = DECL_FUNCTION_CODE (fndecl);
31db0fe0 6728 if (fcode == BUILT_IN_MEMSET)
aa493694
JJ
6729 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6730 dest, val, len);
5039610b 6731 else if (fcode == BUILT_IN_BZERO)
aa493694
JJ
6732 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6733 dest, len);
5039610b
SL
6734 else
6735 gcc_unreachable ();
44e10129
MM
6736 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6737 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5039610b 6738 return expand_call (fn, target, target == const0_rtx);
28f4ec01
BS
6739}
6740
b8698a0f 6741/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 6742 NULL_RTX if we failed the caller should emit a normal call. */
5197bd50 6743
e3a709be 6744static rtx
8148fe65 6745expand_builtin_bzero (tree exp)
e3a709be 6746{
5039610b 6747 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 6748 return NULL_RTX;
e3a709be 6749
ee92e7ba
MS
6750 tree dest = CALL_EXPR_ARG (exp, 0);
6751 tree size = CALL_EXPR_ARG (exp, 1);
6752
cc8bea0a 6753 check_memop_access (exp, dest, NULL_TREE, size);
8d51ecf8 6754
3477addf 6755 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
6756 memset(ptr x, int 0, size_t y). This is done this way
6757 so that if it isn't expanded inline, we fallback to
6758 calling bzero instead of memset. */
8d51ecf8 6759
ee92e7ba
MS
6760 location_t loc = EXPR_LOCATION (exp);
6761
5039610b 6762 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
6763 fold_convert_loc (loc,
6764 size_type_node, size),
5039610b 6765 const0_rtx, VOIDmode, exp);
e3a709be
KG
6766}
6767
a666df60
RS
6768/* Try to expand cmpstr operation ICODE with the given operands.
6769 Return the result rtx on success, otherwise return null. */
6770
6771static rtx
6772expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
6773 HOST_WIDE_INT align)
6774{
6775 machine_mode insn_mode = insn_data[icode].operand[0].mode;
6776
6777 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
6778 target = NULL_RTX;
6779
99b1c316 6780 class expand_operand ops[4];
a666df60
RS
6781 create_output_operand (&ops[0], target, insn_mode);
6782 create_fixed_operand (&ops[1], arg1_rtx);
6783 create_fixed_operand (&ops[2], arg2_rtx);
6784 create_integer_operand (&ops[3], align);
6785 if (maybe_expand_insn (icode, 4, ops))
6786 return ops[0].value;
6787 return NULL_RTX;
6788}
6789
2be3b5ce 6790/* Expand expression EXP, which is a call to the memcmp built-in function.
9b0f6f5e 6791 Return NULL_RTX if we failed and the caller should emit a normal call,
36b85e43
BS
6792 otherwise try to get the result in TARGET, if convenient.
6793 RESULT_EQ is true if we can relax the returned value to be either zero
6794 or nonzero, without caring about the sign. */
5197bd50 6795
28f4ec01 6796static rtx
36b85e43 6797expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
28f4ec01 6798{
5039610b
SL
6799 if (!validate_arglist (exp,
6800 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6801 return NULL_RTX;
c2bd38e8 6802
7f9f48be
RS
6803 tree arg1 = CALL_EXPR_ARG (exp, 0);
6804 tree arg2 = CALL_EXPR_ARG (exp, 1);
6805 tree len = CALL_EXPR_ARG (exp, 2);
d9c5a8b9
MS
6806
6807 /* Diagnose calls where the specified length exceeds the size of either
6808 object. */
d14c547a
MS
6809 if (!check_read_access (exp, arg1, len, 0)
6810 || !check_read_access (exp, arg2, len, 0))
b99d7d97
QZ
6811 return NULL_RTX;
6812
10a0e2a9 6813 /* Due to the performance benefit, always inline the calls first
b2272b13
QZ
6814 when result_eq is false. */
6815 rtx result = NULL_RTX;
d14c547a 6816 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
b99d7d97 6817 if (!result_eq && fcode != BUILT_IN_BCMP)
d9c5a8b9 6818 {
d5803b98 6819 result = inline_expand_builtin_bytecmp (exp, target);
b2272b13
QZ
6820 if (result)
6821 return result;
d9c5a8b9
MS
6822 }
6823
36b85e43
BS
6824 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6825 location_t loc = EXPR_LOCATION (exp);
358b8f01 6826
7f9f48be
RS
6827 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
6828 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
28f4ec01 6829
7f9f48be
RS
6830 /* If we don't have POINTER_TYPE, call the function. */
6831 if (arg1_align == 0 || arg2_align == 0)
6832 return NULL_RTX;
28f4ec01 6833
7f9f48be
RS
6834 rtx arg1_rtx = get_memory_rtx (arg1, len);
6835 rtx arg2_rtx = get_memory_rtx (arg2, len);
36b85e43 6836 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
28f4ec01 6837
7f9f48be 6838 /* Set MEM_SIZE as appropriate. */
36b85e43 6839 if (CONST_INT_P (len_rtx))
7f9f48be 6840 {
36b85e43
BS
6841 set_mem_size (arg1_rtx, INTVAL (len_rtx));
6842 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7f9f48be 6843 }
6cbaec9e 6844
36b85e43
BS
6845 by_pieces_constfn constfn = NULL;
6846
d5803b98
MS
6847 /* Try to get the byte representation of the constant ARG2 (or, only
6848 when the function's result is used for equality to zero, ARG1)
6849 points to, with its byte size in NBYTES. */
6850 unsigned HOST_WIDE_INT nbytes;
866626ef 6851 const char *rep = getbyterep (arg2, &nbytes);
d5803b98 6852 if (result_eq && rep == NULL)
d0d7f887 6853 {
d5803b98 6854 /* For equality to zero the arguments are interchangeable. */
866626ef 6855 rep = getbyterep (arg1, &nbytes);
d5803b98 6856 if (rep != NULL)
4f353581 6857 std::swap (arg1_rtx, arg2_rtx);
d0d7f887 6858 }
36b85e43 6859
d5803b98
MS
6860 /* If the function's constant bound LEN_RTX is less than or equal
6861 to the byte size of the representation of the constant argument,
6862 and if block move would be done by pieces, we can avoid loading
6863 the bytes from memory and only store the computed constant result. */
6864 if (rep
36b85e43 6865 && CONST_INT_P (len_rtx)
d5803b98 6866 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
36b85e43
BS
6867 constfn = builtin_memcpy_read_str;
6868
b2272b13
QZ
6869 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
6870 TREE_TYPE (len), target,
6871 result_eq, constfn,
d5803b98 6872 CONST_CAST (char *, rep));
36b85e43 6873
7f9f48be
RS
6874 if (result)
6875 {
6876 /* Return the value in the proper mode for this function. */
6877 if (GET_MODE (result) == mode)
6878 return result;
6cbaec9e 6879
7f9f48be
RS
6880 if (target != 0)
6881 {
6882 convert_move (target, result, 0);
6883 return target;
6884 }
8878e913 6885
28f4ec01 6886 return convert_to_mode (mode, result, 0);
7f9f48be 6887 }
28f4ec01 6888
ee516de9 6889 return NULL_RTX;
c2bd38e8
RS
6890}
6891
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  /* Diagnose out-of-bounds reads; bail out to a library call if the
     access checks fail.  */
  if (!check_read_access (exp, arg1)
      || !check_read_access (exp, arg2))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn insn pattern there is nothing
     to expand to.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Include the terminating nul in the compared lengths.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
28f4ec01 7012
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  if (!check_nul_terminated_array (exp, arg1, arg3)
      || !check_nul_terminated_array (exp, arg2, arg3))
    return NULL_RTX;

  location_t loc = tree_inlined_location (exp);
  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  if (!len1 || !len2)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
	  && !len1 && !len2)
	{
	  /* A strncmp read is constrained not just by the bound but
	     also by the length of the shorter string.  Specifying
	     a bound that's larger than the size of either array makes
	     no sense and is likely a bug.  When the length of neither
	     of the two strings is known but the sizes of both of
	     the arrays they are stored in is, issue a warning if
	     the bound is larger than the size of the larger
	     of the two arrays.  */

	  access_ref ref1 (arg3, true);
	  access_ref ref2 (arg3, true);

	  tree bndrng[2] = { NULL_TREE, NULL_TREE };
	  get_size_range (arg3, bndrng, ref1.bndrng);

	  tree size1 = compute_objsize (arg1, 1, &ref1);
	  tree size2 = compute_objsize (arg2, 1, &ref2);
	  tree func = get_callee_fndecl (exp);

	  if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
	    {
	      offset_int rem1 = ref1.size_remaining ();
	      offset_int rem2 = ref2.size_remaining ();
	      /* A remaining size of zero means the access is past the
		 end of the object.  */
	      if (rem1 == 0 || rem2 == 0)
		maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				      bndrng, integer_zero_node);
	      else
		{
		  offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
		  if (maxrem < wi::to_offset (bndrng[0]))
		    maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
					  func, bndrng,
					  wide_int_to_tree (sizetype, maxrem));
		}
	    }
	  else if (bndrng[0]
		   && !integer_zerop (bndrng[0])
		   && ((size1 && integer_zerop (size1))
		       || (size2 && integer_zerop (size2))))
	    maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
				  bndrng, integer_zero_node);
	}
    }

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* Include the terminating nul in the compared lengths.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  if (TREE_NO_WARNING (exp))
    TREE_NO_WARNING (call) = true;
  gcc_assert (TREE_CODE (call) == CALL_EXPR);
  CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
  return expand_call (call, target, target == const0_rtx);
}
7177
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The result of the first call in a function is
   cached in saveregs_value and reused by subsequent calls.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
7215
8870e212 7216/* Expand a call to __builtin_next_arg. */
5197bd50 7217
28f4ec01 7218static rtx
8870e212 7219expand_builtin_next_arg (void)
28f4ec01 7220{
8870e212
JJ
7221 /* Checking arguments is already done in fold_builtin_next_arg
7222 that must be called before this function. */
4319e38c 7223 return expand_binop (ptr_mode, add_optab,
38173d38
JH
7224 crtl->args.internal_arg_pointer,
7225 crtl->args.arg_offset_rtx,
28f4ec01
BS
7226 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7227}
7228
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is set, the result must
   be usable as an lvalue (an address is taken in that case).  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  /* Without side effects the expression is already stable.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Re-dereference the saved address so the result denotes the
	 va_list object itself.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
7278
/* The "standard" definition of va_list is void*.  Default for the
   TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
7286
/* The "standard" abi va_list is va_list_type_node.  Default for the
   TARGET_FN_ABI_VA_LIST hook; FNDECL is unused here.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
7294
7295/* The "standard" type of va_list is va_list_type_node. */
7296
7297tree
7298std_canonical_va_list_type (tree type)
7299{
7300 tree wtype, htype;
7301
35cbb299
KT
7302 wtype = va_list_type_node;
7303 htype = type;
431e31a9
TV
7304
7305 if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
7306 {
7307 /* If va_list is an array type, the argument may have decayed
7308 to a pointer type, e.g. by being passed to another function.
7309 In that case, unwrap both types so that we can compare the
7310 underlying records. */
7311 if (TREE_CODE (htype) == ARRAY_TYPE
7312 || POINTER_TYPE_P (htype))
7313 {
7314 wtype = TREE_TYPE (wtype);
7315 htype = TREE_TYPE (htype);
7316 }
7317 }
7318 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7319 return va_list_type_node;
7320
7321 return NULL_TREE;
7322}
7323
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is expanded as a writable lvalue and NEXTARG
   is stored into it.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
7333
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnoses a call with fewer than two arguments.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse of the second argument;
     give up on error.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target hook when one is provided.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
7362
5039610b 7363/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 7364
d3707adb 7365static rtx
5039610b 7366expand_builtin_va_end (tree exp)
d3707adb 7367{
5039610b 7368 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 7369
daf68dd7
RH
7370 /* Evaluate for side effects, if needed. I hate macros that don't
7371 do that. */
7372 if (TREE_SIDE_EFFECTS (valist))
7373 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
7374
7375 return const0_rtx;
7376}
7377
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be an lvalue; the source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
7429
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, ensure the result lives in a
	 register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
7480
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  Handles the plain
   alloca, alloca_with_align, and alloca_with_align_and_max variants.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit
	  ))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
              ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
              : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  /* Dynamic allocations for variables are recorded during gimplification.  */
  if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
    record_dynamic_alloc (exp);

  return result;
}
7549
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the dynamic stack offset in Pmode, convert it to ptr_mode,
     and fold it into the bottom address.  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
7573
ac868f29
EB
7574/* Expand a call to bswap builtin in EXP.
7575 Return NULL_RTX if a normal call should be emitted rather than expanding the
7576 function in-line. If convenient, the result should be placed in TARGET.
7577 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
7578
7579static rtx
ef4bddc2 7580expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 7581 rtx subtarget)
167fa32c 7582{
167fa32c
EC
7583 tree arg;
7584 rtx op0;
7585
5039610b
SL
7586 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7587 return NULL_RTX;
167fa32c 7588
5039610b 7589 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
7590 op0 = expand_expr (arg,
7591 subtarget && GET_MODE (subtarget) == target_mode
7592 ? subtarget : NULL_RTX,
7593 target_mode, EXPAND_NORMAL);
7594 if (GET_MODE (op0) != target_mode)
7595 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 7596
ac868f29 7597 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
7598
7599 gcc_assert (target);
7600
ac868f29 7601 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
7602}
7603
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The last operand of
     expand_unop is its unsignedp flag; it is set for every optab here
     except clrsb (NOTE(review): clrsb is the one signed operation in
     this set -- confirm against expand_unop).  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
994a57cd 7632
b8698a0f 7633/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
7634 as the builtin_expect semantic should've been already executed by
7635 tree branch prediction pass. */
994a57cd
RH
7636
7637static rtx
5039610b 7638expand_builtin_expect (tree exp, rtx target)
994a57cd 7639{
451409e4 7640 tree arg;
994a57cd 7641
5039610b 7642 if (call_expr_nargs (exp) < 2)
994a57cd 7643 return const0_rtx;
5039610b 7644 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 7645
5039610b 7646 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 7647 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 7648 gcc_assert (!flag_guess_branch_prob
1da2ed5f 7649 || optimize == 0 || seen_error ());
994a57cd
RH
7650 return target;
7651}
5f2d6cfa 7652
1e9168b2
ML
7653/* Expand a call to __builtin_expect_with_probability. We just return our
7654 argument as the builtin_expect semantic should've been already executed by
7655 tree branch prediction pass. */
7656
7657static rtx
7658expand_builtin_expect_with_probability (tree exp, rtx target)
7659{
7660 tree arg;
7661
7662 if (call_expr_nargs (exp) < 3)
7663 return const0_rtx;
7664 arg = CALL_EXPR_ARG (exp, 0);
7665
7666 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7667 /* When guessing was done, the hints should be already stripped away. */
7668 gcc_assert (!flag_guess_branch_prob
7669 || optimize == 0 || seen_error ());
7670 return target;
7671}
7672
7673
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  /* The alignment and optional misalignment operands are simply
     discarded; they must be free of side effects.  */
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
7690
/* Expand a call to __builtin_trap.  Use the target's trap insn when it
   has one, otherwise fall back to a call to abort.  A barrier is emitted
   afterwards since control does not continue past the trap.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
075ec276 7712
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
7723
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument (and store it back into the call) so it is
     not evaluated more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
7746
5039610b 7747/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
7748 Return NULL is a normal call should be emitted rather than expanding the
7749 function inline. If convenient, the result should be placed in TARGET.
7750 SUBTARGET may be used as the target for computing the operand. */
7751
7752static rtx
5039610b 7753expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
7754{
7755 rtx op0, op1;
7756 tree arg;
7757
5039610b
SL
7758 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7759 return NULL_RTX;
046625fa 7760
5039610b 7761 arg = CALL_EXPR_ARG (exp, 0);
84217346 7762 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 7763
5039610b 7764 arg = CALL_EXPR_ARG (exp, 1);
84217346 7765 op1 = expand_normal (arg);
046625fa
RH
7766
7767 return expand_copysign (op0, op1, target);
7768}
7769
c05ece92 7770/* Emit a call to __builtin___clear_cache. */
677feb77 7771
c05ece92
AO
7772void
7773default_emit_call_builtin___clear_cache (rtx begin, rtx end)
677feb77 7774{
c05ece92
AO
7775 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
7776 BUILTIN_ASM_NAME_PTR
7777 (BUILT_IN_CLEAR_CACHE));
7778
7779 emit_library_call (callee,
7780 LCT_NORMAL, VOIDmode,
b737b70f
JJ
7781 convert_memory_address (ptr_mode, begin), ptr_mode,
7782 convert_memory_address (ptr_mode, end), ptr_mode);
c05ece92
AO
7783}
7784
/* Emit a call to __builtin___clear_cache, unless the target specifies
   it as do-nothing.  This function can be used by trampoline
   finalizers to duplicate the effects of expanding a call to the
   clear_cache builtin.  */

void
maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
{
  /* Both bounds must already be pointer-shaped RTL; anything else means
     the caller passed something that was never a pointer.  */
  if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
      || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return;
    }

  if (targetm.have_clear_cache ())
    {
      /* We have a "clear_cache" insn, and it will handle everything.  */
      class expand_operand ops[2];

      create_address_operand (&ops[0], begin);
      create_address_operand (&ops[1], end);

      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return;
      /* If the insn's predicates rejected the operands, fall through to
	 the library-call path below.  */
    }
  else
    {
#ifndef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return;
#endif /* CLEAR_INSN_CACHE */
    }

  /* Fall back to the target's library-call emitter.  */
  targetm.calls.emit_call_builtin___clear_cache (begin, end);
}
7822
7823/* Expand a call to __builtin___clear_cache. */
7824
7825static void
7826expand_builtin___clear_cache (tree exp)
7827{
677feb77
DD
7828 tree begin, end;
7829 rtx begin_rtx, end_rtx;
677feb77
DD
7830
7831 /* We must not expand to a library call. If we did, any
7832 fallback library function in libgcc that might contain a call to
7833 __builtin___clear_cache() would recurse infinitely. */
7834 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7835 {
7836 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
c05ece92 7837 return;
677feb77
DD
7838 }
7839
c05ece92
AO
7840 begin = CALL_EXPR_ARG (exp, 0);
7841 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 7842
c05ece92
AO
7843 end = CALL_EXPR_ARG (exp, 1);
7844 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 7845
c05ece92 7846 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
677feb77
DD
7847}
7848
6de9cd9a
DN
7849/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
7850
7851static rtx
7852round_trampoline_addr (rtx tramp)
7853{
7854 rtx temp, addend, mask;
7855
7856 /* If we don't need too much alignment, we'll have been guaranteed
7857 proper alignment by get_trampoline_type. */
7858 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
7859 return tramp;
7860
7861 /* Round address up to desired boundary. */
7862 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
7863 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
7864 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
7865
7866 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
7867 temp, 0, OPTAB_LIB_WIDEN);
7868 tramp = expand_simple_binop (Pmode, AND, temp, mask,
7869 temp, 0, OPTAB_LIB_WIDEN);
7870
7871 return tramp;
7872}
7873
/* Expand a call to __builtin_init_trampoline (or the heap variant).
   EXP supplies the trampoline address, the nested function's address
   and the static chain value.  ONSTACK is true for the classic
   stack-allocated trampoline, false for the heap form.  Returns
   const0_rtx (the builtin has no meaningful value).  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      /* Rounding moved the address, so rebuild the MEM with the known
	 alignment and size of the trampoline template.  */
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      /* Warn: an executable-stack trampoline was emitted even though
	 the target offers function descriptors as an alternative.  */
      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
7931
7932static rtx
5039610b 7933expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
7934{
7935 rtx tramp;
7936
5039610b 7937 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
7938 return NULL_RTX;
7939
5039610b 7940 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 7941 tramp = round_trampoline_addr (tramp);
531ca746
RH
7942 if (targetm.calls.trampoline_adjust_address)
7943 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
7944
7945 return tramp;
7946}
7947
4c640e26
EB
7948/* Expand a call to the builtin descriptor initialization routine.
7949 A descriptor is made up of a couple of pointers to the static
7950 chain and the code entry in this order. */
7951
7952static rtx
7953expand_builtin_init_descriptor (tree exp)
7954{
7955 tree t_descr, t_func, t_chain;
7956 rtx m_descr, r_descr, r_func, r_chain;
7957
7958 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
7959 VOID_TYPE))
7960 return NULL_RTX;
7961
7962 t_descr = CALL_EXPR_ARG (exp, 0);
7963 t_func = CALL_EXPR_ARG (exp, 1);
7964 t_chain = CALL_EXPR_ARG (exp, 2);
7965
7966 r_descr = expand_normal (t_descr);
7967 m_descr = gen_rtx_MEM (BLKmode, r_descr);
7968 MEM_NOTRAP_P (m_descr) = 1;
0bdf9f92 7969 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
4c640e26
EB
7970
7971 r_func = expand_normal (t_func);
7972 r_chain = expand_normal (t_chain);
7973
7974 /* Generate insns to initialize the descriptor. */
7975 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
7976 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
7977 POINTER_SIZE / BITS_PER_UNIT), r_func);
7978
7979 return const0_rtx;
7980}
7981
7982/* Expand a call to the builtin descriptor adjustment routine. */
7983
7984static rtx
7985expand_builtin_adjust_descriptor (tree exp)
7986{
7987 rtx tramp;
7988
7989 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7990 return NULL_RTX;
7991
7992 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
7993
7994 /* Unalign the descriptor to allow runtime identification. */
7995 tramp = plus_constant (ptr_mode, tramp,
7996 targetm.calls.custom_function_descriptors);
7997
7998 return force_operand (tramp, NULL_RTX);
7999}
8000
0f67fa83
WG
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn could not be emitted; discard any partial sequence and
	 fall back to the generic bit-extraction path below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: reinterpret it as the
	 equally-sized integer mode.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: isolate the single word that holds the sign
	 bit and adjust BITPOS to be relative to that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
d1c38823
ZD
8107
8108/* Expand fork or exec calls. TARGET is the desired target of the
5039610b 8109 call. EXP is the call. FN is the
d1c38823
ZD
8110 identificator of the actual function. IGNORE is nonzero if the
8111 value is to be ignored. */
8112
8113static rtx
5039610b 8114expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
d1c38823
ZD
8115{
8116 tree id, decl;
8117 tree call;
8118
b5338fb3
MS
8119 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8120 {
d14c547a 8121 tree path = CALL_EXPR_ARG (exp, 0);
b5338fb3 8122 /* Detect unterminated path. */
d14c547a 8123 if (!check_read_access (exp, path))
b5338fb3
MS
8124 return NULL_RTX;
8125
8126 /* Also detect unterminated first argument. */
8127 switch (DECL_FUNCTION_CODE (fn))
8128 {
8129 case BUILT_IN_EXECL:
8130 case BUILT_IN_EXECLE:
8131 case BUILT_IN_EXECLP:
d14c547a 8132 if (!check_read_access (exp, path))
b5338fb3
MS
8133 return NULL_RTX;
8134 default:
8135 break;
8136 }
8137 }
8138
8139
d1c38823
ZD
8140 /* If we are not profiling, just call the function. */
8141 if (!profile_arc_flag)
8142 return NULL_RTX;
8143
8144 /* Otherwise call the wrapper. This should be equivalent for the rest of
8145 compiler, so the code does not diverge, and the wrapper may run the
2b8a92de 8146 code necessary for keeping the profiling sane. */
d1c38823
ZD
8147
8148 switch (DECL_FUNCTION_CODE (fn))
8149 {
8150 case BUILT_IN_FORK:
8151 id = get_identifier ("__gcov_fork");
8152 break;
8153
8154 case BUILT_IN_EXECL:
8155 id = get_identifier ("__gcov_execl");
8156 break;
8157
8158 case BUILT_IN_EXECV:
8159 id = get_identifier ("__gcov_execv");
8160 break;
8161
8162 case BUILT_IN_EXECLP:
8163 id = get_identifier ("__gcov_execlp");
8164 break;
8165
8166 case BUILT_IN_EXECLE:
8167 id = get_identifier ("__gcov_execle");
8168 break;
8169
8170 case BUILT_IN_EXECVP:
8171 id = get_identifier ("__gcov_execvp");
8172 break;
8173
8174 case BUILT_IN_EXECVE:
8175 id = get_identifier ("__gcov_execve");
8176 break;
8177
8178 default:
298e6adc 8179 gcc_unreachable ();
d1c38823
ZD
8180 }
8181
c2255bc4
AH
8182 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8183 FUNCTION_DECL, id, TREE_TYPE (fn));
d1c38823
ZD
8184 DECL_EXTERNAL (decl) = 1;
8185 TREE_PUBLIC (decl) = 1;
8186 DECL_ARTIFICIAL (decl) = 1;
8187 TREE_NOTHROW (decl) = 1;
ac382b62
JM
8188 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8189 DECL_VISIBILITY_SPECIFIED (decl) = 1;
db3927fb 8190 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
d1c38823 8191 return expand_call (call, target, ignore);
5039610b 8192 }
b8698a0f 8193
48ae6c13
RH
8194
8195\f
02ee605c
RH
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
8211
1387fef3
AS
8212/* Expand the memory expression LOC and return the appropriate memory operand
8213 for the builtin_sync operations. */
8214
8215static rtx
ef4bddc2 8216get_builtin_sync_mem (tree loc, machine_mode mode)
1387fef3
AS
8217{
8218 rtx addr, mem;
b6895597
AS
8219 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8220 ? TREE_TYPE (TREE_TYPE (loc))
8221 : TREE_TYPE (loc));
8222 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
1387fef3 8223
b6895597 8224 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
1413a419 8225 addr = convert_memory_address (addr_mode, addr);
1387fef3
AS
8226
8227 /* Note that we explicitly do not want any alias information for this
8228 memory, so that we kill all other live memories. Otherwise we don't
8229 satisfy the full barrier semantics of the intrinsic. */
b6895597
AS
8230 mem = gen_rtx_MEM (mode, addr);
8231
8232 set_mem_addr_space (mem, addr_space);
8233
8234 mem = validize_mem (mem);
1387fef3 8235
1be38ccb
RG
8236 /* The alignment needs to be at least according to that of the mode. */
8237 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
0eb77834 8238 get_pointer_alignment (loc)));
9cd9e512 8239 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
1387fef3
AS
8240 MEM_VOLATILE_P (mem) = 1;
8241
8242 return mem;
8243}
8244
86951993
AM
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  if (TREE_CODE (exp) == SSA_NAME
      && TYPE_MODE (TREE_TYPE (exp)) != mode)
    {
      /* Undo argument promotion if possible, as combine might not
	 be able to do it later due to MEM_VOLATILE_P uses in the
	 patterns.  */
      gimple *g = get_gimple_for_ssa_name (exp);
      if (g && gimple_assign_cast_p (g))
	{
	  tree rhs = gimple_assign_rhs1 (g);
	  tree_code code = gimple_assign_rhs_code (g);
	  /* Only strip a widening integral conversion whose source is
	     already in the wanted MODE.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
	      && (TYPE_PRECISION (TREE_TYPE (exp))
		  > TYPE_PRECISION (TREE_TYPE (rhs))))
	    exp = rhs;
	}
    }

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
8286
8287
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Warn once per TU for each of the two NAND families; their
	 semantics changed in GCC 4.4.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* __sync operations always use the strongest (SEQ_CST) sync model.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
8350
8351/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 8352 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
8353 true if this is the boolean form. TARGET is a place for us to store the
8354 results; this is NOT optional if IS_BOOL is true. */
8355
8356static rtx
ef4bddc2 8357expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 8358 bool is_bool, rtx target)
48ae6c13 8359{
1387fef3 8360 rtx old_val, new_val, mem;
f0409b19 8361 rtx *pbool, *poval;
48ae6c13
RH
8362
8363 /* Expand the operands. */
5039610b 8364 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
8365 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8366 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 8367
f0409b19
RH
8368 pbool = poval = NULL;
8369 if (target != const0_rtx)
8370 {
8371 if (is_bool)
8372 pbool = &target;
8373 else
8374 poval = &target;
8375 }
8376 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
8377 false, MEMMODEL_SYNC_SEQ_CST,
8378 MEMMODEL_SYNC_SEQ_CST))
86951993 8379 return NULL_RTX;
5039610b 8380
86951993 8381 return target;
48ae6c13
RH
8382}
8383
8384/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8385 general form is actually an atomic exchange, and some targets only
8386 support a reduced form with the second argument being a constant 1.
b8698a0f 8387 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 8388 the results. */
48ae6c13
RH
8389
8390static rtx
ef4bddc2 8391expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 8392 rtx target)
48ae6c13 8393{
1387fef3 8394 rtx val, mem;
48ae6c13
RH
8395
8396 /* Expand the operands. */
5039610b 8397 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
8398 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8399
744accb2 8400 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
8401}
8402
8403/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8404
8405static void
ef4bddc2 8406expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
8407{
8408 rtx mem;
8409
8410 /* Expand the operands. */
8411 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8412
46b35980 8413 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
8414}
8415
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Let the target validate any extra bits it defines above the base
     model; otherwise reject unknown high bits ourselves.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memodel model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
8459
8460/* Expand the __atomic_exchange intrinsic:
8461 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8462 EXP is the CALL_EXPR.
8463 TARGET is an optional place for us to store the results. */
8464
8465static rtx
ef4bddc2 8466expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
8467{
8468 rtx val, mem;
8469 enum memmodel model;
8470
8471 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
8472
8473 if (!flag_inline_atomics)
8474 return NULL_RTX;
8475
8476 /* Expand the operands. */
8477 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8478 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8479
744accb2 8480 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
8481}
8482
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* Diagnose invalid model combinations and fall back to SEQ_CST, which
     is always safe.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; build a MEM so we can both read the expected
     value and conditionally store the old value back through it.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
8564
849a76a5
JJ
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* The library function takes 5 arguments: ptr, expected-ptr, desired,
     success model, failure model.  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the expected value to a stack slot so we can pass its
     address, as the library interface requires.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The IFN result is a complex pair: success flag in the real part,
	 the (possibly updated) expected value in the imaginary part.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
8614
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the operand size in the low byte and the weak flag
     in bit 8.  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  location_t loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Same model sanity checks as expand_builtin_atomic_compare_exchange.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* Inline expansion failed; fall back to the library call.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* Result is a complex pair: success flag (real part) and the old
	 value read from memory (imaginary part).  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
8684
86951993
AM
8685/* Expand the __atomic_load intrinsic:
8686 TYPE __atomic_load (TYPE *object, enum memmodel)
8687 EXP is the CALL_EXPR.
8688 TARGET is an optional place for us to store the results. */
8689
8690static rtx
ef4bddc2 8691expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
86951993
AM
8692{
8693 rtx mem;
8694 enum memmodel model;
8695
8696 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
46b35980 8697 if (is_mm_release (model) || is_mm_acq_rel (model))
86951993 8698 {
620e594b 8699 location_t loc
8d9fdb49
MP
8700 = expansion_point_location_if_in_system_header (input_location);
8701 warning_at (loc, OPT_Winvalid_memory_model,
8702 "invalid memory model for %<__atomic_load%>");
77df5327 8703 model = MEMMODEL_SEQ_CST;
86951993
AM
8704 }
8705
8706 if (!flag_inline_atomics)
8707 return NULL_RTX;
8708
8709 /* Expand the operand. */
8710 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8711
8712 return expand_atomic_load (target, mem, model);
8713}
8714
8715
8716/* Expand the __atomic_store intrinsic:
8717 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
8718 EXP is the CALL_EXPR.
8719 TARGET is an optional place for us to store the results. */
8720
8721static rtx
ef4bddc2 8722expand_builtin_atomic_store (machine_mode mode, tree exp)
86951993
AM
8723{
8724 rtx mem, val;
8725 enum memmodel model;
8726
8727 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
46b35980
AM
8728 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
8729 || is_mm_release (model)))
86951993 8730 {
620e594b 8731 location_t loc
8d9fdb49
MP
8732 = expansion_point_location_if_in_system_header (input_location);
8733 warning_at (loc, OPT_Winvalid_memory_model,
8734 "invalid memory model for %<__atomic_store%>");
77df5327 8735 model = MEMMODEL_SEQ_CST;
86951993
AM
8736 }
8737
8738 if (!flag_inline_atomics)
8739 return NULL_RTX;
8740
8741 /* Expand the operands. */
8742 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8743 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8744
0669295b 8745 return expand_atomic_store (mem, val, model, false);
86951993
AM
8746}
8747
/* Expand the __atomic_fetch_XXX intrinsic:
	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  The callee decl in the
     CALL_EXPR is temporarily swapped for EXT_CALL's decl so expand_call
     emits the alternate library routine.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NAND: the library routine returned the pre/post value of
	     ~(*mem & val); redo the AND and NOT to fix up the result.  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }

  return ret;
}
8822
adedd5c1
JJ
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.

   CALL's arguments are: (0) the object address, (1) the bit number,
   (2) a flag selecting whether the result is the tested bit (nonzero)
   or the whole masked word (zero), and optionally (3) a memory model
   (defaults to MEMMODEL_SYNC_SEQ_CST).  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  /* This IFN is only created when inlining of atomics is enabled.  */
  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the RTL operation and its optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* No result needed: just emit a plain atomic fetch-op on the mask
	 (1 << bit, inverted for the AND/reset case).  */
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* First try the dedicated bit-test-and-* insn pattern.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: perform a generic atomic fetch-op on the mask, then
     extract the requested bit (or masked word) from the old value.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* Result is the single tested bit: shift it down and mask to 1.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    /* Result is the old value masked by the bit's mask.  */
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
8906
d660c35e
AM
8907/* Expand an atomic clear operation.
8908 void _atomic_clear (BOOL *obj, enum memmodel)
8909 EXP is the call expression. */
8910
8911static rtx
8912expand_builtin_atomic_clear (tree exp)
8913{
ef4bddc2 8914 machine_mode mode;
d660c35e
AM
8915 rtx mem, ret;
8916 enum memmodel model;
8917
f4b31647 8918 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
8919 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8920 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8921
46b35980 8922 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 8923 {
620e594b 8924 location_t loc
8d9fdb49
MP
8925 = expansion_point_location_if_in_system_header (input_location);
8926 warning_at (loc, OPT_Winvalid_memory_model,
8927 "invalid memory model for %<__atomic_store%>");
77df5327 8928 model = MEMMODEL_SEQ_CST;
d660c35e
AM
8929 }
8930
8931 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
8932 Failing that, a store is issued by __atomic_store. The only way this can
8933 fail is if the bool type is larger than a word size. Unlikely, but
8934 handle it anyway for completeness. Assume a single threaded model since
8935 there is no atomic support in this case, and no barriers are required. */
8936 ret = expand_atomic_store (mem, const0_rtx, model, true);
8937 if (!ret)
8938 emit_move_insn (mem, const0_rtx);
8939 return const0_rtx;
8940}
8941
8942/* Expand an atomic test_and_set operation.
8943 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
8944 EXP is the call expression. */
8945
8946static rtx
744accb2 8947expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 8948{
744accb2 8949 rtx mem;
d660c35e 8950 enum memmodel model;
ef4bddc2 8951 machine_mode mode;
d660c35e 8952
f4b31647 8953 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
d660c35e
AM
8954 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8955 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8956
744accb2 8957 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
8958}
8959
8960
86951993
AM
8961/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
8962 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
8963
8964static tree
8965fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
8966{
8967 int size;
ef4bddc2 8968 machine_mode mode;
86951993
AM
8969 unsigned int mode_align, type_align;
8970
8971 if (TREE_CODE (arg0) != INTEGER_CST)
8972 return NULL_TREE;
48ae6c13 8973
f4b31647 8974 /* We need a corresponding integer mode for the access to be lock-free. */
86951993 8975 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
f4b31647
RS
8976 if (!int_mode_for_size (size, 0).exists (&mode))
8977 return boolean_false_node;
8978
86951993
AM
8979 mode_align = GET_MODE_ALIGNMENT (mode);
8980
310055e7
JW
8981 if (TREE_CODE (arg1) == INTEGER_CST)
8982 {
8983 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
8984
8985 /* Either this argument is null, or it's a fake pointer encoding
8986 the alignment of the object. */
146ec50f 8987 val = least_bit_hwi (val);
310055e7
JW
8988 val *= BITS_PER_UNIT;
8989
8990 if (val == 0 || mode_align < val)
8991 type_align = mode_align;
8992 else
8993 type_align = val;
8994 }
86951993
AM
8995 else
8996 {
8997 tree ttype = TREE_TYPE (arg1);
8998
8999 /* This function is usually invoked and folded immediately by the front
9000 end before anything else has a chance to look at it. The pointer
9001 parameter at this point is usually cast to a void *, so check for that
9002 and look past the cast. */
7d9cf801
JJ
9003 if (CONVERT_EXPR_P (arg1)
9004 && POINTER_TYPE_P (ttype)
9005 && VOID_TYPE_P (TREE_TYPE (ttype))
9006 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
86951993
AM
9007 arg1 = TREE_OPERAND (arg1, 0);
9008
9009 ttype = TREE_TYPE (arg1);
9010 gcc_assert (POINTER_TYPE_P (ttype));
9011
9012 /* Get the underlying type of the object. */
9013 ttype = TREE_TYPE (ttype);
9014 type_align = TYPE_ALIGN (ttype);
9015 }
9016
026c3cfd 9017 /* If the object has smaller alignment, the lock free routines cannot
86951993
AM
9018 be used. */
9019 if (type_align < mode_align)
58d38fd2 9020 return boolean_false_node;
86951993
AM
9021
9022 /* Check if a compare_and_swap pattern exists for the mode which represents
9023 the required size. The pattern is not allowed to fail, so the existence
969a32ce
TR
9024 of the pattern indicates support is present. Also require that an
9025 atomic load exists for the required size. */
9026 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
58d38fd2 9027 return boolean_true_node;
86951993 9028 else
58d38fd2 9029 return boolean_false_node;
86951993
AM
9030}
9031
9032/* Return true if the parameters to call EXP represent an object which will
9033 always generate lock free instructions. The first argument represents the
9034 size of the object, and the second parameter is a pointer to the object
9035 itself. If NULL is passed for the object, then the result is based on
9036 typical alignment for an object of the specified size. Otherwise return
9037 false. */
9038
9039static rtx
9040expand_builtin_atomic_always_lock_free (tree exp)
9041{
9042 tree size;
9043 tree arg0 = CALL_EXPR_ARG (exp, 0);
9044 tree arg1 = CALL_EXPR_ARG (exp, 1);
9045
9046 if (TREE_CODE (arg0) != INTEGER_CST)
9047 {
a9c697b8 9048 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
86951993
AM
9049 return const0_rtx;
9050 }
9051
9052 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 9053 if (size == boolean_true_node)
86951993
AM
9054 return const1_rtx;
9055 return const0_rtx;
9056}
9057
9058/* Return a one or zero if it can be determined that object ARG1 of size ARG
9059 is lock free on this architecture. */
9060
9061static tree
9062fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9063{
9064 if (!flag_inline_atomics)
9065 return NULL_TREE;
9066
9067 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
9068 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9069 return boolean_true_node;
86951993
AM
9070
9071 return NULL_TREE;
9072}
9073
9074/* Return true if the parameters to call EXP represent an object which will
9075 always generate lock free instructions. The first argument represents the
9076 size of the object, and the second parameter is a pointer to the object
9077 itself. If NULL is passed for the object, then the result is based on
9078 typical alignment for an object of the specified size. Otherwise return
9079 NULL*/
9080
9081static rtx
9082expand_builtin_atomic_is_lock_free (tree exp)
9083{
9084 tree size;
9085 tree arg0 = CALL_EXPR_ARG (exp, 0);
9086 tree arg1 = CALL_EXPR_ARG (exp, 1);
9087
9088 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9089 {
a9c697b8 9090 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
86951993
AM
9091 return NULL_RTX;
9092 }
9093
9094 if (!flag_inline_atomics)
9095 return NULL_RTX;
9096
9097 /* If the value is known at compile time, return the RTX for it. */
9098 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 9099 if (size == boolean_true_node)
86951993
AM
9100 return const1_rtx;
9101
9102 return NULL_RTX;
9103}
9104
86951993
AM
9105/* Expand the __atomic_thread_fence intrinsic:
9106 void __atomic_thread_fence (enum memmodel)
9107 EXP is the CALL_EXPR. */
9108
9109static void
9110expand_builtin_atomic_thread_fence (tree exp)
9111{
c39169c8
RH
9112 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9113 expand_mem_thread_fence (model);
86951993
AM
9114}
9115
9116/* Expand the __atomic_signal_fence intrinsic:
9117 void __atomic_signal_fence (enum memmodel)
9118 EXP is the CALL_EXPR. */
9119
9120static void
9121expand_builtin_atomic_signal_fence (tree exp)
9122{
c39169c8
RH
9123 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9124 expand_mem_signal_fence (model);
48ae6c13
RH
9125}
9126
/* Expand the __sync_synchronize intrinsic: a full memory barrier with
   the legacy __sync (SEQ_CST + full-barrier) semantics.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
9134
f959607b
CLT
9135static rtx
9136expand_builtin_thread_pointer (tree exp, rtx target)
9137{
9138 enum insn_code icode;
9139 if (!validate_arglist (exp, VOID_TYPE))
9140 return const0_rtx;
9141 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9142 if (icode != CODE_FOR_nothing)
9143 {
99b1c316 9144 class expand_operand op;
b8a542c6
AP
9145 /* If the target is not sutitable then create a new target. */
9146 if (target == NULL_RTX
9147 || !REG_P (target)
9148 || GET_MODE (target) != Pmode)
f959607b
CLT
9149 target = gen_reg_rtx (Pmode);
9150 create_output_operand (&op, target, Pmode);
9151 expand_insn (icode, 1, &op);
9152 return target;
9153 }
a3f9f006 9154 error ("%<__builtin_thread_pointer%> is not supported on this target");
f959607b
CLT
9155 return const0_rtx;
9156}
9157
9158static void
9159expand_builtin_set_thread_pointer (tree exp)
9160{
9161 enum insn_code icode;
9162 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9163 return;
9164 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9165 if (icode != CODE_FOR_nothing)
9166 {
99b1c316 9167 class expand_operand op;
f959607b
CLT
9168 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9169 Pmode, EXPAND_NORMAL);
5440a1b0 9170 create_input_operand (&op, val, Pmode);
f959607b
CLT
9171 expand_insn (icode, 1, &op);
9172 return;
9173 }
a3f9f006 9174 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
f959607b
CLT
9175}
9176
28f4ec01 9177\f
862d0b35
DN
9178/* Emit code to restore the current value of stack. */
9179
9180static void
9181expand_stack_restore (tree var)
9182{
58f4cf2a
DM
9183 rtx_insn *prev;
9184 rtx sa = expand_normal (var);
862d0b35
DN
9185
9186 sa = convert_memory_address (Pmode, sa);
9187
9188 prev = get_last_insn ();
9189 emit_stack_restore (SAVE_BLOCK, sa);
d33606c3
EB
9190
9191 record_new_stack_level ();
9192
862d0b35
DN
9193 fixup_args_size_notes (prev, get_last_insn (), 0);
9194}
9195
862d0b35
DN
/* Emit code to save the current value of stack.  Returns an rtx holding
   the saved stack pointer; emit_stack_save fills in RET.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);
  return ret;
}
9206
1f62d637
TV
9207/* Emit code to get the openacc gang, worker or vector id or size. */
9208
9209static rtx
9210expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9211{
9212 const char *name;
9213 rtx fallback_retval;
9214 rtx_insn *(*gen_fn) (rtx, rtx);
9215 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9216 {
9217 case BUILT_IN_GOACC_PARLEVEL_ID:
9218 name = "__builtin_goacc_parlevel_id";
9219 fallback_retval = const0_rtx;
9220 gen_fn = targetm.gen_oacc_dim_pos;
9221 break;
9222 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9223 name = "__builtin_goacc_parlevel_size";
9224 fallback_retval = const1_rtx;
9225 gen_fn = targetm.gen_oacc_dim_size;
9226 break;
9227 default:
9228 gcc_unreachable ();
9229 }
9230
9231 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9232 {
9233 error ("%qs only supported in OpenACC code", name);
9234 return const0_rtx;
9235 }
9236
9237 tree arg = CALL_EXPR_ARG (exp, 0);
9238 if (TREE_CODE (arg) != INTEGER_CST)
9239 {
9240 error ("non-constant argument 0 to %qs", name);
9241 return const0_rtx;
9242 }
9243
9244 int dim = TREE_INT_CST_LOW (arg);
9245 switch (dim)
9246 {
9247 case GOMP_DIM_GANG:
9248 case GOMP_DIM_WORKER:
9249 case GOMP_DIM_VECTOR:
9250 break;
9251 default:
9252 error ("illegal argument 0 to %qs", name);
9253 return const0_rtx;
9254 }
9255
9256 if (ignore)
9257 return target;
9258
39bc9f83
TV
9259 if (target == NULL_RTX)
9260 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9261
1f62d637
TV
9262 if (!targetm.have_oacc_dim_size ())
9263 {
9264 emit_move_insn (target, fallback_retval);
9265 return target;
9266 }
9267
9268 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9269 emit_insn (gen_fn (reg, GEN_INT (dim)));
9270 if (reg != target)
9271 emit_move_insn (target, reg);
9272
9273 return target;
9274}
41dbbb37 9275
/* Expand a string compare operation using a sequence of char comparison
   to get rid of the calling overhead, with result going to TARGET if
   that's convenient.

   VAR_STR is the variable string source;
   CONST_STR is the constant string source;
   LENGTH is the number of chars to compare;
   CONST_STR_N indicates which source string is the constant string;
   IS_MEMCMP indicates whether it's a memcmp or strcmp.

   to: (assume const_str_n is 2, i.e., arg2 is a constant string)

   target = (int) (unsigned char) var_str[0]
	    - (int) (unsigned char) const_str[0];
   if (target != 0)
     goto ne_label;
     ...
   target = (int) (unsigned char) var_str[length - 2]
	    - (int) (unsigned char) const_str[length - 2];
   if (target != 0)
     goto ne_label;
   target = (int) (unsigned char) var_str[length - 1]
	    - (int) (unsigned char) const_str[length - 1];
   ne_label:
  */

static rtx
inline_string_cmp (rtx target, tree var_str, const char *const_str,
		   unsigned HOST_WIDE_INT length,
		   int const_str_n, machine_mode mode)
{
  HOST_WIDE_INT offset = 0;
  rtx var_rtx_array
    = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
  rtx var_rtx = NULL_RTX;
  rtx const_rtx = NULL_RTX;
  rtx result = target ? target : gen_reg_rtx (mode);
  rtx_code_label *ne_label = gen_label_rtx ();
  /* Comparisons are done bytewise as unsigned char.  */
  tree unit_type_node = unsigned_char_type_node;
  scalar_int_mode unit_mode
    = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);

  start_sequence ();

  for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
    {
      /* Load the next variable byte and materialize the constant byte.  */
      var_rtx
	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
      const_rtx = c_readstr (const_str + offset, unit_mode);
      /* Keep operand order matching the original argument order so the
	 sign of the result is correct.  */
      rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
      rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;

      /* Zero-extend both bytes to the result mode and subtract.  */
      op0 = convert_modes (mode, unit_mode, op0, 1);
      op1 = convert_modes (mode, unit_mode, op1, 1);
      result = expand_simple_binop (mode, MINUS, op0, op1,
				    result, 1, OPTAB_WIDEN);
      /* After every byte but the last, branch out on inequality.  */
      if (i < length - 1)
	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
				 mode, true, ne_label);
      offset += GET_MODE_SIZE (unit_mode);
    }

  emit_label (ne_label);
  rtx_insn *insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
9345
d5803b98
MS
9346/* Inline expansion of a call to str(n)cmp and memcmp, with result going
9347 to TARGET if that's convenient.
b2272b13 9348 If the call is not been inlined, return NULL_RTX. */
d5803b98 9349
b2272b13 9350static rtx
d5803b98 9351inline_expand_builtin_bytecmp (tree exp, rtx target)
b2272b13
QZ
9352{
9353 tree fndecl = get_callee_fndecl (exp);
9354 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
b2272b13
QZ
9355 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9356
d5803b98 9357 /* Do NOT apply this inlining expansion when optimizing for size or
3d592d2d
QZ
9358 optimization level below 2. */
9359 if (optimize < 2 || optimize_insn_for_size_p ())
9360 return NULL_RTX;
9361
b2272b13 9362 gcc_checking_assert (fcode == BUILT_IN_STRCMP
10a0e2a9 9363 || fcode == BUILT_IN_STRNCMP
b2272b13
QZ
9364 || fcode == BUILT_IN_MEMCMP);
9365
523a59ff
QZ
9366 /* On a target where the type of the call (int) has same or narrower presicion
9367 than unsigned char, give up the inlining expansion. */
9368 if (TYPE_PRECISION (unsigned_char_type_node)
9369 >= TYPE_PRECISION (TREE_TYPE (exp)))
9370 return NULL_RTX;
9371
b2272b13
QZ
9372 tree arg1 = CALL_EXPR_ARG (exp, 0);
9373 tree arg2 = CALL_EXPR_ARG (exp, 1);
9374 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9375
9376 unsigned HOST_WIDE_INT len1 = 0;
9377 unsigned HOST_WIDE_INT len2 = 0;
9378 unsigned HOST_WIDE_INT len3 = 0;
9379
d5803b98
MS
9380 /* Get the object representation of the initializers of ARG1 and ARG2
9381 as strings, provided they refer to constant objects, with their byte
9382 sizes in LEN1 and LEN2, respectively. */
866626ef
MS
9383 const char *bytes1 = getbyterep (arg1, &len1);
9384 const char *bytes2 = getbyterep (arg2, &len2);
10a0e2a9 9385
d5803b98
MS
9386 /* Fail if neither argument refers to an initialized constant. */
9387 if (!bytes1 && !bytes2)
b2272b13
QZ
9388 return NULL_RTX;
9389
6aa2e42c
ML
9390 if (is_ncmp)
9391 {
d5803b98 9392 /* Fail if the memcmp/strncmp bound is not a constant. */
6aa2e42c
ML
9393 if (!tree_fits_uhwi_p (len3_tree))
9394 return NULL_RTX;
6aa2e42c 9395
d5803b98 9396 len3 = tree_to_uhwi (len3_tree);
6aa2e42c 9397
d5803b98
MS
9398 if (fcode == BUILT_IN_MEMCMP)
9399 {
9400 /* Fail if the memcmp bound is greater than the size of either
9401 of the two constant objects. */
9402 if ((bytes1 && len1 < len3)
9403 || (bytes2 && len2 < len3))
9404 return NULL_RTX;
9405 }
9406 }
b2272b13 9407
d5803b98
MS
9408 if (fcode != BUILT_IN_MEMCMP)
9409 {
9410 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9411 and LEN2 to the length of the nul-terminated string stored
9412 in each. */
9413 if (bytes1 != NULL)
9414 len1 = strnlen (bytes1, len1) + 1;
9415 if (bytes2 != NULL)
9416 len2 = strnlen (bytes2, len2) + 1;
9417 }
9418
9419 /* See inline_string_cmp. */
9420 int const_str_n;
b2272b13
QZ
9421 if (!len1)
9422 const_str_n = 2;
9423 else if (!len2)
9424 const_str_n = 1;
9425 else if (len2 > len1)
9426 const_str_n = 1;
9427 else
9428 const_str_n = 2;
9429
d5803b98
MS
9430 /* For strncmp only, compute the new bound as the smallest of
9431 the lengths of the two strings (plus 1) and the bound provided
9432 to the function. */
9433 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9434 if (is_ncmp && len3 < bound)
9435 bound = len3;
b2272b13 9436
d5803b98 9437 /* If the bound of the comparison is larger than the threshold,
b2272b13 9438 do nothing. */
d5803b98 9439 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
b2272b13
QZ
9440 return NULL_RTX;
9441
9442 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9443
9444 /* Now, start inline expansion the call. */
10a0e2a9 9445 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
d5803b98 9446 (const_str_n == 1) ? bytes1 : bytes2, bound,
523a59ff 9447 const_str_n, mode);
b2272b13
QZ
9448}
9449
425fc685
RE
9450/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9451 represents the size of the first argument to that call, or VOIDmode
9452 if the argument is a pointer. IGNORE will be true if the result
9453 isn't used. */
9454static rtx
9455expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9456 bool ignore)
9457{
9458 rtx val, failsafe;
9459 unsigned nargs = call_expr_nargs (exp);
9460
9461 tree arg0 = CALL_EXPR_ARG (exp, 0);
9462
9463 if (mode == VOIDmode)
9464 {
9465 mode = TYPE_MODE (TREE_TYPE (arg0));
9466 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9467 }
9468
9469 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9470
9471 /* An optional second argument can be used as a failsafe value on
9472 some machines. If it isn't present, then the failsafe value is
9473 assumed to be 0. */
9474 if (nargs > 1)
9475 {
9476 tree arg1 = CALL_EXPR_ARG (exp, 1);
9477 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9478 }
9479 else
9480 failsafe = const0_rtx;
9481
9482 /* If the result isn't used, the behavior is undefined. It would be
9483 nice to emit a warning here, but path splitting means this might
9484 happen with legitimate code. So simply drop the builtin
9485 expansion in that case; we've handled any side-effects above. */
9486 if (ignore)
9487 return const0_rtx;
9488
9489 /* If we don't have a suitable target, create one to hold the result. */
9490 if (target == NULL || GET_MODE (target) != mode)
9491 target = gen_reg_rtx (mode);
9492
9493 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9494 val = convert_modes (mode, VOIDmode, val, false);
9495
9496 return targetm.speculation_safe_value (mode, target, val, failsafe);
9497}
9498
28f4ec01
BS
9499/* Expand an expression EXP that calls a built-in function,
9500 with result going to TARGET if that's convenient
9501 (and in mode MODE if that's convenient).
9502 SUBTARGET may be used as the target for computing one of EXP's operands.
9503 IGNORE is nonzero if the value is to be ignored. */
9504
9505rtx
ef4bddc2 9506expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 9507 int ignore)
28f4ec01 9508{
2f503025 9509 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 9510 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 9511 int flags;
28f4ec01 9512
d51151b2
JJ
9513 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9514 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9515
bdea98ca
MO
9516 /* When ASan is enabled, we don't want to expand some memory/string
9517 builtins and rely on libsanitizer's hooks. This allows us to avoid
9518 redundant checks and be sure, that possible overflow will be detected
9519 by ASan. */
9520
4d732405 9521 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
bdea98ca
MO
9522 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9523 return expand_call (exp, target, ignore);
9524
28f4ec01
BS
9525 /* When not optimizing, generate calls to library functions for a certain
9526 set of builtins. */
d25225de 9527 if (!optimize
48ae6c13 9528 && !called_as_built_in (fndecl)
63bf9a90
JH
9529 && fcode != BUILT_IN_FORK
9530 && fcode != BUILT_IN_EXECL
9531 && fcode != BUILT_IN_EXECV
9532 && fcode != BUILT_IN_EXECLP
9533 && fcode != BUILT_IN_EXECLE
9534 && fcode != BUILT_IN_EXECVP
9535 && fcode != BUILT_IN_EXECVE
c05ece92 9536 && fcode != BUILT_IN_CLEAR_CACHE
9e878cf1 9537 && !ALLOCA_FUNCTION_CODE_P (fcode)
31db0fe0 9538 && fcode != BUILT_IN_FREE)
d25225de 9539 return expand_call (exp, target, ignore);
28f4ec01 9540
0a45ec5c
RS
9541 /* The built-in function expanders test for target == const0_rtx
9542 to determine whether the function's result will be ignored. */
9543 if (ignore)
9544 target = const0_rtx;
9545
9546 /* If the result of a pure or const built-in function is ignored, and
9547 none of its arguments are volatile, we can avoid expanding the
9548 built-in call and just evaluate the arguments for side-effects. */
9549 if (target == const0_rtx
9e3920e9
JJ
9550 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9551 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
9552 {
9553 bool volatilep = false;
9554 tree arg;
5039610b 9555 call_expr_arg_iterator iter;
0a45ec5c 9556
5039610b
SL
9557 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9558 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
9559 {
9560 volatilep = true;
9561 break;
9562 }
9563
9564 if (! volatilep)
9565 {
5039610b
SL
9566 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9567 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
9568 return const0_rtx;
9569 }
9570 }
9571
28f4ec01
BS
9572 switch (fcode)
9573 {
ea6a6627 9574 CASE_FLT_FN (BUILT_IN_FABS):
6dc198e3 9575 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
e2323f5b
PB
9576 case BUILT_IN_FABSD32:
9577 case BUILT_IN_FABSD64:
9578 case BUILT_IN_FABSD128:
5039610b 9579 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 9580 if (target)
c22cacf3 9581 return target;
075ec276
RS
9582 break;
9583
ea6a6627 9584 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6dc198e3 9585 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5039610b 9586 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
9587 if (target)
9588 return target;
9589 break;
9590
5906d013
EC
9591 /* Just do a normal library call if we were unable to fold
9592 the values. */
ea6a6627 9593 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 9594 break;
28f4ec01 9595
1b1562a5 9596 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 9597 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
9598 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9599 if (target)
9600 return target;
9601 break;
9602
eaee4464
UB
9603 CASE_FLT_FN (BUILT_IN_ILOGB):
9604 if (! flag_unsafe_math_optimizations)
9605 break;
903c723b
TC
9606 gcc_fallthrough ();
9607 CASE_FLT_FN (BUILT_IN_ISINF):
9608 CASE_FLT_FN (BUILT_IN_FINITE):
9609 case BUILT_IN_ISFINITE:
9610 case BUILT_IN_ISNORMAL:
4359dc2a 9611 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
9612 if (target)
9613 return target;
9614 break;
9615
6c32ee74 9616 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
9617 CASE_FLT_FN (BUILT_IN_LCEIL):
9618 CASE_FLT_FN (BUILT_IN_LLCEIL):
9619 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 9620 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 9621 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 9622 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
9623 if (target)
9624 return target;
9625 break;
9626
6c32ee74 9627 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
9628 CASE_FLT_FN (BUILT_IN_LRINT):
9629 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 9630 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
9631 CASE_FLT_FN (BUILT_IN_LROUND):
9632 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 9633 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
9634 if (target)
9635 return target;
9636 break;
9637
ea6a6627 9638 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 9639 target = expand_builtin_powi (exp, target);
17684d46
RG
9640 if (target)
9641 return target;
9642 break;
9643
75c7c595 9644 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 9645 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
9646 gcc_assert (target);
9647 return target;
9648
ea6a6627
VR
9649 CASE_FLT_FN (BUILT_IN_SIN):
9650 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
9651 if (! flag_unsafe_math_optimizations)
9652 break;
9653 target = expand_builtin_mathfn_3 (exp, target, subtarget);
9654 if (target)
9655 return target;
9656 break;
9657
403e54f0
RG
9658 CASE_FLT_FN (BUILT_IN_SINCOS):
9659 if (! flag_unsafe_math_optimizations)
9660 break;
9661 target = expand_builtin_sincos (exp);
9662 if (target)
9663 return target;
9664 break;
9665
28f4ec01
BS
9666 case BUILT_IN_APPLY_ARGS:
9667 return expand_builtin_apply_args ();
9668
9669 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9670 FUNCTION with a copy of the parameters described by
9671 ARGUMENTS, and ARGSIZE. It returns a block of memory
9672 allocated on the stack into which is stored all the registers
9673 that might possibly be used for returning the result of a
9674 function. ARGUMENTS is the value returned by
9675 __builtin_apply_args. ARGSIZE is the number of bytes of
9676 arguments that must be copied. ??? How should this value be
9677 computed? We'll also need a safe worst case value for varargs
9678 functions. */
9679 case BUILT_IN_APPLY:
5039610b 9680 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 9681 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 9682 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 9683 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
9684 return const0_rtx;
9685 else
9686 {
28f4ec01
BS
9687 rtx ops[3];
9688
5039610b
SL
9689 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
9690 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
9691 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
9692
9693 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9694 }
9695
9696 /* __builtin_return (RESULT) causes the function to return the
9697 value described by RESULT. RESULT is address of the block of
9698 memory returned by __builtin_apply. */
9699 case BUILT_IN_RETURN:
5039610b
SL
9700 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9701 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
9702 return const0_rtx;
9703
9704 case BUILT_IN_SAVEREGS:
d3707adb 9705 return expand_builtin_saveregs ();
28f4ec01 9706
6ef5231b
JJ
9707 case BUILT_IN_VA_ARG_PACK:
9708 /* All valid uses of __builtin_va_arg_pack () are removed during
9709 inlining. */
c94ed7a1 9710 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
9711 return const0_rtx;
9712
ab0e176c
JJ
9713 case BUILT_IN_VA_ARG_PACK_LEN:
9714 /* All valid uses of __builtin_va_arg_pack_len () are removed during
9715 inlining. */
c94ed7a1 9716 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
9717 return const0_rtx;
9718
28f4ec01
BS
9719 /* Return the address of the first anonymous stack arg. */
9720 case BUILT_IN_NEXT_ARG:
5039610b 9721 if (fold_builtin_next_arg (exp, false))
c22cacf3 9722 return const0_rtx;
8870e212 9723 return expand_builtin_next_arg ();
28f4ec01 9724
677feb77 9725 case BUILT_IN_CLEAR_CACHE:
c05ece92
AO
9726 expand_builtin___clear_cache (exp);
9727 return const0_rtx;
677feb77 9728
28f4ec01 9729 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9730 return expand_builtin_classify_type (exp);
28f4ec01
BS
9731
9732 case BUILT_IN_CONSTANT_P:
6de9cd9a 9733 return const0_rtx;
28f4ec01
BS
9734
9735 case BUILT_IN_FRAME_ADDRESS:
9736 case BUILT_IN_RETURN_ADDRESS:
5039610b 9737 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
9738
9739 /* Returns the address of the area where the structure is returned.
9740 0 otherwise. */
9741 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 9742 if (call_expr_nargs (exp) != 0
ca7fd9cd 9743 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 9744 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 9745 return const0_rtx;
28f4ec01 9746 else
ca7fd9cd 9747 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01 9748
9e878cf1 9749 CASE_BUILT_IN_ALLOCA:
b7e52782 9750 target = expand_builtin_alloca (exp);
28f4ec01
BS
9751 if (target)
9752 return target;
9753 break;
9754
e3174bdf
MO
9755 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9756 return expand_asan_emit_allocas_unpoison (exp);
9757
6de9cd9a
DN
9758 case BUILT_IN_STACK_SAVE:
9759 return expand_stack_save ();
9760
9761 case BUILT_IN_STACK_RESTORE:
5039610b 9762 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
9763 return const0_rtx;
9764
ac868f29 9765 case BUILT_IN_BSWAP16:
167fa32c
EC
9766 case BUILT_IN_BSWAP32:
9767 case BUILT_IN_BSWAP64:
fe7ebef7 9768 case BUILT_IN_BSWAP128:
ac868f29 9769 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
9770 if (target)
9771 return target;
9772 break;
9773
ea6a6627 9774 CASE_INT_FN (BUILT_IN_FFS):
5039610b 9775 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 9776 subtarget, ffs_optab);
2928cd7a
RH
9777 if (target)
9778 return target;
9779 break;
9780
ea6a6627 9781 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 9782 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 9783 subtarget, clz_optab);
2928cd7a
RH
9784 if (target)
9785 return target;
9786 break;
9787
ea6a6627 9788 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 9789 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 9790 subtarget, ctz_optab);
2928cd7a
RH
9791 if (target)
9792 return target;
9793 break;
9794
3801c801 9795 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
9796 target = expand_builtin_unop (target_mode, exp, target,
9797 subtarget, clrsb_optab);
9798 if (target)
9799 return target;
9800 break;
9801
ea6a6627 9802 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 9803 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 9804 subtarget, popcount_optab);
2928cd7a
RH
9805 if (target)
9806 return target;
9807 break;
9808
ea6a6627 9809 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 9810 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 9811 subtarget, parity_optab);
28f4ec01
BS
9812 if (target)
9813 return target;
9814 break;
9815
9816 case BUILT_IN_STRLEN:
5039610b 9817 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
9818 if (target)
9819 return target;
9820 break;
9821
781ff3d8
MS
9822 case BUILT_IN_STRNLEN:
9823 target = expand_builtin_strnlen (exp, target, target_mode);
9824 if (target)
9825 return target;
9826 break;
9827
ee92e7ba 9828 case BUILT_IN_STRCAT:
b5338fb3 9829 target = expand_builtin_strcat (exp);
ee92e7ba
MS
9830 if (target)
9831 return target;
9832 break;
9833
b5338fb3
MS
9834 case BUILT_IN_GETTEXT:
9835 case BUILT_IN_PUTS:
9836 case BUILT_IN_PUTS_UNLOCKED:
9837 case BUILT_IN_STRDUP:
9838 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
d14c547a 9839 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
b5338fb3
MS
9840 break;
9841
9842 case BUILT_IN_INDEX:
9843 case BUILT_IN_RINDEX:
9844 case BUILT_IN_STRCHR:
9845 case BUILT_IN_STRRCHR:
9846 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
d14c547a 9847 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
b5338fb3
MS
9848 break;
9849
9850 case BUILT_IN_FPUTS:
9851 case BUILT_IN_FPUTS_UNLOCKED:
9852 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
d14c547a 9853 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
b5338fb3
MS
9854 break;
9855
9856 case BUILT_IN_STRNDUP:
9857 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
d14c547a 9858 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
b5338fb3
MS
9859 break;
9860
9861 case BUILT_IN_STRCASECMP:
d14c547a
MS
9862 case BUILT_IN_STRPBRK:
9863 case BUILT_IN_STRSPN:
9864 case BUILT_IN_STRCSPN:
b5338fb3
MS
9865 case BUILT_IN_STRSTR:
9866 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
9867 {
d14c547a
MS
9868 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
9869 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
b5338fb3
MS
9870 }
9871 break;
9872
28f4ec01 9873 case BUILT_IN_STRCPY:
44e10129 9874 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
9875 if (target)
9876 return target;
9877 break;
8d51ecf8 9878
ee92e7ba
MS
9879 case BUILT_IN_STRNCAT:
9880 target = expand_builtin_strncat (exp, target);
9881 if (target)
9882 return target;
9883 break;
9884
da9e9f08 9885 case BUILT_IN_STRNCPY:
44e10129 9886 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
9887 if (target)
9888 return target;
9889 break;
8d51ecf8 9890
9cb65f92 9891 case BUILT_IN_STPCPY:
609ae0e2 9892 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
9893 if (target)
9894 return target;
9895 break;
9896
e50d56a5
MS
9897 case BUILT_IN_STPNCPY:
9898 target = expand_builtin_stpncpy (exp, target);
9899 if (target)
9900 return target;
9901 break;
9902
d9c5a8b9
MS
9903 case BUILT_IN_MEMCHR:
9904 target = expand_builtin_memchr (exp, target);
9905 if (target)
9906 return target;
9907 break;
9908
28f4ec01 9909 case BUILT_IN_MEMCPY:
44e10129 9910 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
9911 if (target)
9912 return target;
9913 break;
9914
e50d56a5
MS
9915 case BUILT_IN_MEMMOVE:
9916 target = expand_builtin_memmove (exp, target);
9917 if (target)
9918 return target;
9919 break;
9920
9cb65f92 9921 case BUILT_IN_MEMPCPY:
671a00ee 9922 target = expand_builtin_mempcpy (exp, target);
28f4ec01
BS
9923 if (target)
9924 return target;
9925 break;
9926
9927 case BUILT_IN_MEMSET:
5039610b 9928 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
9929 if (target)
9930 return target;
9931 break;
9932
e3a709be 9933 case BUILT_IN_BZERO:
8148fe65 9934 target = expand_builtin_bzero (exp);
e3a709be
KG
9935 if (target)
9936 return target;
9937 break;
9938
10a0e2a9 9939 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8b0b334a
QZ
9940 back to a BUILT_IN_STRCMP. Remember to delete the 3rd paramater
9941 when changing it to a strcmp call. */
9942 case BUILT_IN_STRCMP_EQ:
9943 target = expand_builtin_memcmp (exp, target, true);
9944 if (target)
9945 return target;
9946
9947 /* Change this call back to a BUILT_IN_STRCMP. */
10a0e2a9 9948 TREE_OPERAND (exp, 1)
8b0b334a
QZ
9949 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
9950
9951 /* Delete the last parameter. */
9952 unsigned int i;
9953 vec<tree, va_gc> *arg_vec;
9954 vec_alloc (arg_vec, 2);
9955 for (i = 0; i < 2; i++)
9956 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
9957 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
9958 /* FALLTHROUGH */
9959
28f4ec01 9960 case BUILT_IN_STRCMP:
44e10129 9961 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
9962 if (target)
9963 return target;
9964 break;
9965
8b0b334a
QZ
9966 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
9967 back to a BUILT_IN_STRNCMP. */
9968 case BUILT_IN_STRNCMP_EQ:
9969 target = expand_builtin_memcmp (exp, target, true);
9970 if (target)
9971 return target;
9972
9973 /* Change it back to a BUILT_IN_STRNCMP. */
10a0e2a9 9974 TREE_OPERAND (exp, 1)
8b0b334a
QZ
9975 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
9976 /* FALLTHROUGH */
9977
da9e9f08
KG
9978 case BUILT_IN_STRNCMP:
9979 target = expand_builtin_strncmp (exp, target, mode);
9980 if (target)
9981 return target;
9982 break;
9983
4b2a62db 9984 case BUILT_IN_BCMP:
28f4ec01 9985 case BUILT_IN_MEMCMP:
36b85e43
BS
9986 case BUILT_IN_MEMCMP_EQ:
9987 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
28f4ec01
BS
9988 if (target)
9989 return target;
36b85e43
BS
9990 if (fcode == BUILT_IN_MEMCMP_EQ)
9991 {
9992 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
9993 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
9994 }
28f4ec01 9995 break;
28f4ec01
BS
9996
9997 case BUILT_IN_SETJMP:
903c723b 9998 /* This should have been lowered to the builtins below. */
4f6c2131
EB
9999 gcc_unreachable ();
10000
10001 case BUILT_IN_SETJMP_SETUP:
10002 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10003 and the receiver label. */
5039610b 10004 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 10005 {
5039610b 10006 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 10007 VOIDmode, EXPAND_NORMAL);
5039610b 10008 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 10009 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
10010
10011 /* This is copied from the handling of non-local gotos. */
10012 expand_builtin_setjmp_setup (buf_addr, label_r);
10013 nonlocal_goto_handler_labels
b5241a5a 10014 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
10015 nonlocal_goto_handler_labels);
10016 /* ??? Do not let expand_label treat us as such since we would
10017 not want to be both on the list of non-local labels and on
10018 the list of forced labels. */
10019 FORCED_LABEL (label) = 0;
10020 return const0_rtx;
10021 }
10022 break;
10023
4f6c2131
EB
10024 case BUILT_IN_SETJMP_RECEIVER:
10025 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 10026 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 10027 {
5039610b 10028 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 10029 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
10030
10031 expand_builtin_setjmp_receiver (label_r);
10032 return const0_rtx;
10033 }
250d07b6 10034 break;
28f4ec01
BS
10035
10036 /* __builtin_longjmp is passed a pointer to an array of five words.
10037 It's similar to the C library longjmp function but works with
10038 __builtin_setjmp above. */
10039 case BUILT_IN_LONGJMP:
5039610b 10040 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 10041 {
5039610b 10042 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 10043 VOIDmode, EXPAND_NORMAL);
5039610b 10044 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
10045
10046 if (value != const1_rtx)
10047 {
9e637a26 10048 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
10049 return const0_rtx;
10050 }
10051
10052 expand_builtin_longjmp (buf_addr, value);
10053 return const0_rtx;
10054 }
4f6c2131 10055 break;
28f4ec01 10056
6de9cd9a 10057 case BUILT_IN_NONLOCAL_GOTO:
5039610b 10058 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
10059 if (target)
10060 return target;
10061 break;
10062
2b92e7f5
RK
10063 /* This updates the setjmp buffer that is its argument with the value
10064 of the current stack pointer. */
10065 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 10066 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
10067 {
10068 rtx buf_addr
5039610b 10069 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
10070
10071 expand_builtin_update_setjmp_buf (buf_addr);
10072 return const0_rtx;
10073 }
10074 break;
10075
28f4ec01 10076 case BUILT_IN_TRAP:
9602f5a0 10077 expand_builtin_trap ();
28f4ec01
BS
10078 return const0_rtx;
10079
468059bc
DD
10080 case BUILT_IN_UNREACHABLE:
10081 expand_builtin_unreachable ();
10082 return const0_rtx;
10083
ea6a6627 10084 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
10085 case BUILT_IN_SIGNBITD32:
10086 case BUILT_IN_SIGNBITD64:
10087 case BUILT_IN_SIGNBITD128:
ef79730c
RS
10088 target = expand_builtin_signbit (exp, target);
10089 if (target)
10090 return target;
10091 break;
10092
28f4ec01
BS
10093 /* Various hooks for the DWARF 2 __throw routine. */
10094 case BUILT_IN_UNWIND_INIT:
10095 expand_builtin_unwind_init ();
10096 return const0_rtx;
10097 case BUILT_IN_DWARF_CFA:
10098 return virtual_cfa_rtx;
10099#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
10100 case BUILT_IN_DWARF_SP_COLUMN:
10101 return expand_builtin_dwarf_sp_column ();
d9d5c9de 10102 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 10103 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 10104 return const0_rtx;
28f4ec01
BS
10105#endif
10106 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 10107 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 10108 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 10109 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 10110 case BUILT_IN_EH_RETURN:
5039610b
SL
10111 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10112 CALL_EXPR_ARG (exp, 1));
28f4ec01 10113 return const0_rtx;
52a11cbf 10114 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 10115 return expand_builtin_eh_return_data_regno (exp);
c76362b4 10116 case BUILT_IN_EXTEND_POINTER:
5039610b 10117 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
10118 case BUILT_IN_EH_POINTER:
10119 return expand_builtin_eh_pointer (exp);
10120 case BUILT_IN_EH_FILTER:
10121 return expand_builtin_eh_filter (exp);
10122 case BUILT_IN_EH_COPY_VALUES:
10123 return expand_builtin_eh_copy_values (exp);
c76362b4 10124
6c535c69 10125 case BUILT_IN_VA_START:
5039610b 10126 return expand_builtin_va_start (exp);
d3707adb 10127 case BUILT_IN_VA_END:
5039610b 10128 return expand_builtin_va_end (exp);
d3707adb 10129 case BUILT_IN_VA_COPY:
5039610b 10130 return expand_builtin_va_copy (exp);
994a57cd 10131 case BUILT_IN_EXPECT:
5039610b 10132 return expand_builtin_expect (exp, target);
1e9168b2
ML
10133 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10134 return expand_builtin_expect_with_probability (exp, target);
45d439ac
JJ
10135 case BUILT_IN_ASSUME_ALIGNED:
10136 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 10137 case BUILT_IN_PREFETCH:
5039610b 10138 expand_builtin_prefetch (exp);
a9ccbb60
JJ
10139 return const0_rtx;
10140
6de9cd9a 10141 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
10142 return expand_builtin_init_trampoline (exp, true);
10143 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10144 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 10145 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 10146 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 10147
4c640e26
EB
10148 case BUILT_IN_INIT_DESCRIPTOR:
10149 return expand_builtin_init_descriptor (exp);
10150 case BUILT_IN_ADJUST_DESCRIPTOR:
10151 return expand_builtin_adjust_descriptor (exp);
10152
d1c38823
ZD
10153 case BUILT_IN_FORK:
10154 case BUILT_IN_EXECL:
10155 case BUILT_IN_EXECV:
10156 case BUILT_IN_EXECLP:
10157 case BUILT_IN_EXECLE:
10158 case BUILT_IN_EXECVP:
10159 case BUILT_IN_EXECVE:
5039610b 10160 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
10161 if (target)
10162 return target;
10163 break;
28f4ec01 10164
e0a8ecf2
AM
10165 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10166 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10167 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10168 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10169 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10170 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 10171 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
10172 if (target)
10173 return target;
10174 break;
10175
e0a8ecf2
AM
10176 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10177 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10178 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10179 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10180 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10181 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 10182 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
10183 if (target)
10184 return target;
10185 break;
10186
e0a8ecf2
AM
10187 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10188 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10189 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10190 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10191 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10192 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 10193 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
10194 if (target)
10195 return target;
10196 break;
10197
e0a8ecf2
AM
10198 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10199 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10200 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10201 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10202 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10203 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 10204 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
10205 if (target)
10206 return target;
10207 break;
10208
e0a8ecf2
AM
10209 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10210 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10211 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10212 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10213 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10214 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 10215 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
10216 if (target)
10217 return target;
10218 break;
10219
e0a8ecf2
AM
10220 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10221 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10222 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10223 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10224 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
10225 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 10226 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
10227 if (target)
10228 return target;
10229 break;
10230
e0a8ecf2
AM
10231 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10232 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10233 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10234 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10235 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10236 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 10237 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
10238 if (target)
10239 return target;
10240 break;
10241
e0a8ecf2
AM
10242 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10243 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10244 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10245 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10246 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10247 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 10248 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
10249 if (target)
10250 return target;
10251 break;
10252
e0a8ecf2
AM
10253 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10254 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10255 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10256 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10257 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10258 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 10259 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
10260 if (target)
10261 return target;
10262 break;
10263
e0a8ecf2
AM
10264 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10265 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10266 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10267 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10268 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10269 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 10270 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
10271 if (target)
10272 return target;
10273 break;
10274
e0a8ecf2
AM
10275 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10276 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10277 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10278 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10279 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10280 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 10281 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
10282 if (target)
10283 return target;
10284 break;
10285
e0a8ecf2
AM
10286 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10287 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10288 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10289 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10290 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10291 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 10292 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
10293 if (target)
10294 return target;
10295 break;
10296
e0a8ecf2
AM
10297 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10298 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10299 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10300 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10301 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
10302 if (mode == VOIDmode)
10303 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
10304 if (!target || !register_operand (target, mode))
10305 target = gen_reg_rtx (mode);
02ee605c 10306
e0a8ecf2
AM
10307 mode = get_builtin_sync_mode
10308 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 10309 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
10310 if (target)
10311 return target;
10312 break;
10313
e0a8ecf2
AM
10314 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10315 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10316 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10317 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10318 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10319 mode = get_builtin_sync_mode
10320 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 10321 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
10322 if (target)
10323 return target;
10324 break;
10325
e0a8ecf2
AM
10326 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10327 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10328 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10329 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10330 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10331 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10332 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
10333 if (target)
10334 return target;
10335 break;
10336
e0a8ecf2
AM
10337 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10338 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10339 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10340 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10341 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10342 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10343 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
10344 return const0_rtx;
10345
e0a8ecf2
AM
10346 case BUILT_IN_SYNC_SYNCHRONIZE:
10347 expand_builtin_sync_synchronize ();
48ae6c13
RH
10348 return const0_rtx;
10349
86951993
AM
10350 case BUILT_IN_ATOMIC_EXCHANGE_1:
10351 case BUILT_IN_ATOMIC_EXCHANGE_2:
10352 case BUILT_IN_ATOMIC_EXCHANGE_4:
10353 case BUILT_IN_ATOMIC_EXCHANGE_8:
10354 case BUILT_IN_ATOMIC_EXCHANGE_16:
10355 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10356 target = expand_builtin_atomic_exchange (mode, exp, target);
10357 if (target)
10358 return target;
10359 break;
10360
10361 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10362 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10363 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10364 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10365 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
10366 {
10367 unsigned int nargs, z;
9771b263 10368 vec<tree, va_gc> *vec;
e351ae85
AM
10369
10370 mode =
10371 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10372 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10373 if (target)
10374 return target;
10375
10376 /* If this is turned into an external library call, the weak parameter
10377 must be dropped to match the expected parameter list. */
10378 nargs = call_expr_nargs (exp);
9771b263 10379 vec_alloc (vec, nargs - 1);
e351ae85 10380 for (z = 0; z < 3; z++)
9771b263 10381 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
10382 /* Skip the boolean weak parameter. */
10383 for (z = 4; z < 6; z++)
9771b263 10384 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
10385 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10386 break;
10387 }
86951993
AM
10388
10389 case BUILT_IN_ATOMIC_LOAD_1:
10390 case BUILT_IN_ATOMIC_LOAD_2:
10391 case BUILT_IN_ATOMIC_LOAD_4:
10392 case BUILT_IN_ATOMIC_LOAD_8:
10393 case BUILT_IN_ATOMIC_LOAD_16:
10394 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10395 target = expand_builtin_atomic_load (mode, exp, target);
10396 if (target)
10397 return target;
10398 break;
10399
10400 case BUILT_IN_ATOMIC_STORE_1:
10401 case BUILT_IN_ATOMIC_STORE_2:
10402 case BUILT_IN_ATOMIC_STORE_4:
10403 case BUILT_IN_ATOMIC_STORE_8:
10404 case BUILT_IN_ATOMIC_STORE_16:
10405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10406 target = expand_builtin_atomic_store (mode, exp);
10407 if (target)
10408 return const0_rtx;
10409 break;
10410
10411 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10412 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10413 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10414 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10415 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10416 {
10417 enum built_in_function lib;
10418 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10419 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10420 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10421 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10422 ignore, lib);
10423 if (target)
10424 return target;
10425 break;
10426 }
10427 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10428 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10429 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10430 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10431 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10432 {
10433 enum built_in_function lib;
10434 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10435 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10436 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10437 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10438 ignore, lib);
10439 if (target)
10440 return target;
10441 break;
10442 }
10443 case BUILT_IN_ATOMIC_AND_FETCH_1:
10444 case BUILT_IN_ATOMIC_AND_FETCH_2:
10445 case BUILT_IN_ATOMIC_AND_FETCH_4:
10446 case BUILT_IN_ATOMIC_AND_FETCH_8:
10447 case BUILT_IN_ATOMIC_AND_FETCH_16:
10448 {
10449 enum built_in_function lib;
10450 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10451 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10452 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10453 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10454 ignore, lib);
10455 if (target)
10456 return target;
10457 break;
10458 }
10459 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10460 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10461 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10462 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10463 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10464 {
10465 enum built_in_function lib;
10466 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10467 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10468 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10469 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10470 ignore, lib);
10471 if (target)
10472 return target;
10473 break;
10474 }
10475 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10476 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10477 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10478 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10479 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10480 {
10481 enum built_in_function lib;
10482 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10483 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10484 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10485 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10486 ignore, lib);
10487 if (target)
10488 return target;
10489 break;
10490 }
10491 case BUILT_IN_ATOMIC_OR_FETCH_1:
10492 case BUILT_IN_ATOMIC_OR_FETCH_2:
10493 case BUILT_IN_ATOMIC_OR_FETCH_4:
10494 case BUILT_IN_ATOMIC_OR_FETCH_8:
10495 case BUILT_IN_ATOMIC_OR_FETCH_16:
10496 {
10497 enum built_in_function lib;
10498 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10499 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10500 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10501 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10502 ignore, lib);
10503 if (target)
10504 return target;
10505 break;
10506 }
10507 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10508 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10509 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10510 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10511 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10513 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10514 ignore, BUILT_IN_NONE);
10515 if (target)
10516 return target;
10517 break;
10518
10519 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10520 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10521 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10522 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10523 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10524 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10525 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10526 ignore, BUILT_IN_NONE);
10527 if (target)
10528 return target;
10529 break;
10530
10531 case BUILT_IN_ATOMIC_FETCH_AND_1:
10532 case BUILT_IN_ATOMIC_FETCH_AND_2:
10533 case BUILT_IN_ATOMIC_FETCH_AND_4:
10534 case BUILT_IN_ATOMIC_FETCH_AND_8:
10535 case BUILT_IN_ATOMIC_FETCH_AND_16:
10536 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10537 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10538 ignore, BUILT_IN_NONE);
10539 if (target)
10540 return target;
10541 break;
10542
10543 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10544 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10545 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10546 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10547 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10548 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10549 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10550 ignore, BUILT_IN_NONE);
10551 if (target)
10552 return target;
10553 break;
10554
10555 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10556 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10557 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10558 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10559 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10560 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10561 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10562 ignore, BUILT_IN_NONE);
10563 if (target)
10564 return target;
10565 break;
10566
10567 case BUILT_IN_ATOMIC_FETCH_OR_1:
10568 case BUILT_IN_ATOMIC_FETCH_OR_2:
10569 case BUILT_IN_ATOMIC_FETCH_OR_4:
10570 case BUILT_IN_ATOMIC_FETCH_OR_8:
10571 case BUILT_IN_ATOMIC_FETCH_OR_16:
10572 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10573 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10574 ignore, BUILT_IN_NONE);
10575 if (target)
10576 return target;
10577 break;
d660c35e
AM
10578
10579 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 10580 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
10581
10582 case BUILT_IN_ATOMIC_CLEAR:
10583 return expand_builtin_atomic_clear (exp);
86951993
AM
10584
10585 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10586 return expand_builtin_atomic_always_lock_free (exp);
10587
10588 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10589 target = expand_builtin_atomic_is_lock_free (exp);
10590 if (target)
10591 return target;
10592 break;
10593
10594 case BUILT_IN_ATOMIC_THREAD_FENCE:
10595 expand_builtin_atomic_thread_fence (exp);
10596 return const0_rtx;
10597
10598 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10599 expand_builtin_atomic_signal_fence (exp);
10600 return const0_rtx;
10601
10a0d495
JJ
10602 case BUILT_IN_OBJECT_SIZE:
10603 return expand_builtin_object_size (exp);
10604
10605 case BUILT_IN_MEMCPY_CHK:
10606 case BUILT_IN_MEMPCPY_CHK:
10607 case BUILT_IN_MEMMOVE_CHK:
10608 case BUILT_IN_MEMSET_CHK:
10609 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10610 if (target)
10611 return target;
10612 break;
10613
10614 case BUILT_IN_STRCPY_CHK:
10615 case BUILT_IN_STPCPY_CHK:
10616 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 10617 case BUILT_IN_STPNCPY_CHK:
10a0d495 10618 case BUILT_IN_STRCAT_CHK:
1c2fc017 10619 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
10620 case BUILT_IN_SNPRINTF_CHK:
10621 case BUILT_IN_VSNPRINTF_CHK:
10622 maybe_emit_chk_warning (exp, fcode);
10623 break;
10624
10625 case BUILT_IN_SPRINTF_CHK:
10626 case BUILT_IN_VSPRINTF_CHK:
10627 maybe_emit_sprintf_chk_warning (exp, fcode);
10628 break;
10629
f959607b
CLT
10630 case BUILT_IN_THREAD_POINTER:
10631 return expand_builtin_thread_pointer (exp, target);
10632
10633 case BUILT_IN_SET_THREAD_POINTER:
10634 expand_builtin_set_thread_pointer (exp);
10635 return const0_rtx;
10636
41dbbb37 10637 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
10638 /* Do library call, if we failed to expand the builtin when
10639 folding. */
41dbbb37
TS
10640 break;
10641
1f62d637
TV
10642 case BUILT_IN_GOACC_PARLEVEL_ID:
10643 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10644 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10645
425fc685
RE
10646 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10647 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10648
10649 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10650 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10651 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10652 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10653 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10654 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10655 return expand_speculation_safe_value (mode, exp, target, ignore);
10656
e62f4abc 10657 default: /* just do library call, if unknown builtin */
84b8b0e0 10658 break;
28f4ec01
BS
10659 }
10660
10661 /* The switch statement above can drop through to cause the function
10662 to be called normally. */
10663 return expand_call (exp, target, ignore);
10664}
b0b3afb2 10665
4977bab6 10666/* Determine whether a tree node represents a call to a built-in
feda1845
RS
10667 function. If the tree T is a call to a built-in function with
10668 the right number of arguments of the appropriate types, return
10669 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10670 Otherwise the return value is END_BUILTINS. */
4682ae04 10671
4977bab6 10672enum built_in_function
fa233e34 10673builtin_mathfn_code (const_tree t)
4977bab6 10674{
fa233e34
KG
10675 const_tree fndecl, arg, parmlist;
10676 const_tree argtype, parmtype;
10677 const_call_expr_arg_iterator iter;
4977bab6 10678
5f92d109 10679 if (TREE_CODE (t) != CALL_EXPR)
4977bab6
ZW
10680 return END_BUILTINS;
10681
2f503025 10682 fndecl = get_callee_fndecl (t);
3d78e008
ML
10683 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10684 return END_BUILTINS;
4977bab6 10685
feda1845 10686 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
fa233e34 10687 init_const_call_expr_arg_iterator (t, &iter);
feda1845 10688 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
c0a47a61 10689 {
feda1845
RS
10690 /* If a function doesn't take a variable number of arguments,
10691 the last element in the list will have type `void'. */
10692 parmtype = TREE_VALUE (parmlist);
10693 if (VOID_TYPE_P (parmtype))
10694 {
fa233e34 10695 if (more_const_call_expr_args_p (&iter))
feda1845
RS
10696 return END_BUILTINS;
10697 return DECL_FUNCTION_CODE (fndecl);
10698 }
10699
fa233e34 10700 if (! more_const_call_expr_args_p (&iter))
c0a47a61 10701 return END_BUILTINS;
b8698a0f 10702
fa233e34 10703 arg = next_const_call_expr_arg (&iter);
5039610b 10704 argtype = TREE_TYPE (arg);
feda1845
RS
10705
10706 if (SCALAR_FLOAT_TYPE_P (parmtype))
10707 {
10708 if (! SCALAR_FLOAT_TYPE_P (argtype))
10709 return END_BUILTINS;
10710 }
10711 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10712 {
10713 if (! COMPLEX_FLOAT_TYPE_P (argtype))
10714 return END_BUILTINS;
10715 }
10716 else if (POINTER_TYPE_P (parmtype))
10717 {
10718 if (! POINTER_TYPE_P (argtype))
10719 return END_BUILTINS;
10720 }
10721 else if (INTEGRAL_TYPE_P (parmtype))
10722 {
10723 if (! INTEGRAL_TYPE_P (argtype))
10724 return END_BUILTINS;
10725 }
10726 else
c0a47a61 10727 return END_BUILTINS;
c0a47a61
RS
10728 }
10729
feda1845 10730 /* Variable-length argument list. */
4977bab6
ZW
10731 return DECL_FUNCTION_CODE (fndecl);
10732}
10733
5039610b
SL
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node when ARG is known
   constant, integer_zero_node when it is known non-constant (or a
   definite answer is required now), and NULL_TREE to leave the call
   for later passes to resolve.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal, or of its element at offset
	 zero, also counts as a constant.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  /* Undecided: keep the call so later optimizations can try again.  */
  return NULL_TREE;
}
10777
1e9168b2
ML
/* Create builtin_expect or builtin_expect_with_probability
   with PRED and EXPECTED as its arguments and return it as a truthvalue.
   Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
   builtin_expect_with_probability instead uses third argument as PROBABILITY
   value.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor, tree probability)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  /* A non-null PROBABILITY selects __builtin_expect_with_probability;
     otherwise plain __builtin_expect is used.  */
  fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  /* Convert the caller's operands to the builtin's parameter types.  */
  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);

  /* Emit a 3-argument call when a third operand (probability, or the
     Fortran predictor) is present; otherwise a 2-argument call.  */
  if (probability)
    call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
  else
    call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				     predictor);

  /* Turn the call back into a truthvalue by comparing against zero.  */
  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
10809
/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
   NULL_TREE if no simplification is possible.  ARG2 is the Fortran
   predictor operand (may be NULL_TREE); ARG3 is the probability operand
   of __builtin_expect_with_probability (may be NULL_TREE).  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
		     tree arg3)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; save it so it is evaluated once.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component and array references to reach the underlying
	 declaration; a weak symbol's address is not a constant.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
10882
5039610b 10883/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 10884
ad82abb8 10885static tree
5039610b 10886fold_builtin_classify_type (tree arg)
ad82abb8 10887{
5039610b 10888 if (arg == 0)
45a2c477 10889 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 10890
45a2c477 10891 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
10892}
10893
d14c547a
MS
/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
   ARG.  TYPE is the type of the folded result.  Returns the constant
   length converted to TYPE, or NULL_TREE when the length is unknown
   (possibly after emitting a missing-terminator warning).  */

static tree
fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* Try to compute the string length as a constant.  */
      c_strlen_data lendata = { };
      tree len = c_strlen (arg, 0, &lendata);

      if (len)
	return fold_convert_loc (loc, type, len);

      /* No constant length.  If the first call did not identify a
	 declaration, call c_strlen again to populate LENDATA.DECL,
	 which is only used for the diagnostic below.  */
      if (!lendata.decl)
	c_strlen (arg, 1, &lendata);

      if (lendata.decl)
	{
	  /* Prefer the argument's own location for the warning, then
	     fall back to the current input location.  */
	  if (EXPR_HAS_LOCATION (arg))
	    loc = EXPR_LOCATION (arg);
	  else if (loc == UNKNOWN_LOCATION)
	    loc = input_location;
	  warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
	}

      return NULL_TREE;
    }
}
10925
ab5e2615
RH
10926/* Fold a call to __builtin_inf or __builtin_huge_val. */
10927
10928static tree
db3927fb 10929fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 10930{
efdc7e19
RH
10931 REAL_VALUE_TYPE real;
10932
6d84156b
JM
10933 /* __builtin_inff is intended to be usable to define INFINITY on all
10934 targets. If an infinity is not available, INFINITY expands "to a
10935 positive constant of type float that overflows at translation
10936 time", footnote "In this case, using INFINITY will violate the
10937 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
10938 Thus we pedwarn to ensure this constraint violation is
10939 diagnosed. */
ab5e2615 10940 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 10941 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 10942
efdc7e19
RH
10943 real_inf (&real);
10944 return build_real (type, real);
ab5e2615
RH
10945}
10946
75c7c595
RG
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  ARG0 is the value
   operand; ARG1 and ARG2 are the sin and cos result pointers.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      /* Constant argument: try to fold the cexpi call outright.  */
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* Otherwise emit a real cexpi call, which requires C99 complex
	 math support from the target's libc.  */
      if (!targetm.libc_has_function (function_c99_math_complex, type)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* Save the call so both stores below share one evaluation.  */
      call = builtin_save_expr (call);
    }

  tree ptype = build_pointer_type (type);
  arg1 = fold_convert (ptype, arg1);
  arg2 = fold_convert (ptype, arg2);
  /* sincos (x, s, c) becomes (*s = cimag (cexpi (x)),
     *c = creal (cexpi (x))).  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
10996
5039610b
SL
10997/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
10998 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
10999
11000static tree
db3927fb 11001fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 11002{
5039610b
SL
11003 if (!validate_arg (arg1, POINTER_TYPE)
11004 || !validate_arg (arg2, POINTER_TYPE)
11005 || !validate_arg (len, INTEGER_TYPE))
11006 return NULL_TREE;
5bb650ec
RS
11007
11008 /* If the LEN parameter is zero, return zero. */
11009 if (integer_zerop (len))
db3927fb 11010 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 11011 arg1, arg2);
5bb650ec
RS
11012
11013 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11014 if (operand_equal_p (arg1, arg2, 0))
db3927fb 11015 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
01847e9d 11016
01847e9d
RS
11017 /* If len parameter is one, return an expression corresponding to
11018 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
ae7e9ddd 11019 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
11020 {
11021 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
11022 tree cst_uchar_ptr_node
11023 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11024
db3927fb
AH
11025 tree ind1
11026 = fold_convert_loc (loc, integer_type_node,
11027 build1 (INDIRECT_REF, cst_uchar_node,
11028 fold_convert_loc (loc,
11029 cst_uchar_ptr_node,
01847e9d 11030 arg1)));
db3927fb
AH
11031 tree ind2
11032 = fold_convert_loc (loc, integer_type_node,
11033 build1 (INDIRECT_REF, cst_uchar_node,
11034 fold_convert_loc (loc,
11035 cst_uchar_ptr_node,
01847e9d 11036 arg2)));
db3927fb 11037 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
01847e9d 11038 }
5bb650ec 11039
5039610b 11040 return NULL_TREE;
5bb650ec
RS
11041}
11042
5039610b 11043/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
11044
11045static tree
db3927fb 11046fold_builtin_isascii (location_t loc, tree arg)
df0785d6 11047{
5039610b
SL
11048 if (!validate_arg (arg, INTEGER_TYPE))
11049 return NULL_TREE;
df0785d6
KG
11050 else
11051 {
11052 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 11053 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 11054 build_int_cst (integer_type_node,
6728ee79 11055 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 11056 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 11057 arg, integer_zero_node);
df0785d6
KG
11058 }
11059}
11060
5039610b 11061/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
11062
11063static tree
db3927fb 11064fold_builtin_toascii (location_t loc, tree arg)
df0785d6 11065{
5039610b
SL
11066 if (!validate_arg (arg, INTEGER_TYPE))
11067 return NULL_TREE;
b8698a0f 11068
5039610b 11069 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 11070 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 11071 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
11072}
11073
5039610b 11074/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
11075
11076static tree
db3927fb 11077fold_builtin_isdigit (location_t loc, tree arg)
61218d19 11078{
5039610b
SL
11079 if (!validate_arg (arg, INTEGER_TYPE))
11080 return NULL_TREE;
61218d19
KG
11081 else
11082 {
11083 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
11084 /* According to the C standard, isdigit is unaffected by locale.
11085 However, it definitely is affected by the target character set. */
c5ff069d
ZW
11086 unsigned HOST_WIDE_INT target_digit0
11087 = lang_hooks.to_target_charset ('0');
11088
11089 if (target_digit0 == 0)
11090 return NULL_TREE;
11091
db3927fb 11092 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
11093 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11094 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 11095 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 11096 build_int_cst (unsigned_type_node, 9));
61218d19
KG
11097 }
11098}
ef79730c 11099
5039610b 11100/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
11101
11102static tree
db3927fb 11103fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 11104{
5039610b
SL
11105 if (!validate_arg (arg, REAL_TYPE))
11106 return NULL_TREE;
9655d83b 11107
db3927fb 11108 arg = fold_convert_loc (loc, type, arg);
db3927fb 11109 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
11110}
11111
5039610b 11112/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
11113
11114static tree
db3927fb 11115fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 11116{
5039610b
SL
11117 if (!validate_arg (arg, INTEGER_TYPE))
11118 return NULL_TREE;
9655d83b 11119
db3927fb 11120 arg = fold_convert_loc (loc, type, arg);
db3927fb 11121 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
11122}
11123
527cab20
KG
11124/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11125
11126static tree
db3927fb 11127fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 11128{
c128599a
KG
11129 if (validate_arg (arg, COMPLEX_TYPE)
11130 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
11131 {
11132 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 11133
527cab20
KG
11134 if (atan2_fn)
11135 {
5039610b 11136 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
11137 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11138 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11139 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
11140 }
11141 }
b8698a0f 11142
527cab20
KG
11143 return NULL_TREE;
11144}
11145
7a2a25ab
KG
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the value operand, ARG1 the int* exponent result pointer, RETTYPE
   the type of the folded result.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value is a real constant without overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
11201
3d577eaf
KG
/* Fold a call to builtin modf.  ARG0 is the value operand, ARG1 the
   pointer receiving the integral part, RETTYPE the type of the folded
   (fractional) result.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold when the value is a real constant without overflow.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
11257
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If the target has a direct instruction pattern for this
     classification builtin, prefer expanding that instead of the
     open-coded comparison sequences built below.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double) values need special handling: only the
     high-order double carries the NaN/Inf encoding.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	/* Build the largest finite value of MODE as a string, then compare
	   fabs(arg) against it; anything greater must be an infinity.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&r, buf);
	/* islessequal is an unordered comparison, so a NaN argument
	   yields false, which is the correct isfinite() result.  */
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is used more than once below, so save it to avoid
	   duplicate evaluation of side effects.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
	real_from_string (&rmax, buf);
	/* The smallest normal value; note ORIG_MODE so the IBM extended
	   emin (not the DFmode emin) is used for the low bound.  */
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
11416
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification is performed (BUILT_IN_ISINF, BUILT_IN_ISINF_SIGN,
   BUILT_IN_ISFINITE or BUILT_IN_ISNAN).  Returns the folded tree, or
   NULL_TREE if no simplification applies.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Fold to a constant when the argument is provably infinite or
	 provably not infinite; otherwise give up.  */
      if (tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG appears twice in the expansion; protect side effects.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both helper results to 0/1 booleans.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (tree_expr_finite_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (tree_expr_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);
      if (!tree_expr_maybe_nan_p (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself; only NaNs compare
	 unordered.  Save ARG since it is used twice.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
11498
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* All tests below are on the magnitude; save fabs(arg) once since it
     is reused in several comparisons.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  /* Built innermost-first: start with the zero/subnormal decision.  */
  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p(emin-1) is the smallest normal value of MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only emit the Inf test if the argument could actually be Inf.  */
  if (tree_expr_maybe_infinite_p (arg))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  /* Likewise, only test for NaN when a NaN is possible.  ORDERED is
     true iff ARG is not a NaN.  */
  if (tree_expr_maybe_nan_p (arg))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
11570
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;
  /* NOTE(review): if both operands are INTEGER_TYPE, cmp_type stays
     NULL_TREE here; presumably the front end guarantees at least one
     REAL_TYPE operand for these type-generic builtins — confirm.  */

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: constant-fold when the NaN-ness
	 of both operands is known, else emit UNORDERED_EXPR directly.  */
      if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
      if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* Use the quiet (unordered) comparison only when a NaN is possible;
     both codes compute the OPPOSITE of the builtin's result, hence the
     TRUTH_NOT_EXPR wrapper below.  */
  code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
	 ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
11622
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Map the builtin to its arithmetic tree code and internal function;
     the _P variants only want the overflow flag, not the result.  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: compute result and flag at compile time.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      /* The overflow flag is carried separately in OVFRES, so strip any
	 TREE_OVERFLOW marker from the numeric result.  */
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Otherwise emit the internal function, which returns a complex
	 value: real part = arithmetic result, imag part = overflow flag.
	 SAVE_EXPR ensures the call is evaluated only once.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Non-_P variants store the result through ARG2 and return the flag.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
11732
b25aad5f
MS
11733/* Fold a call to __builtin_FILE to a constant string. */
11734
11735static inline tree
11736fold_builtin_FILE (location_t loc)
11737{
11738 if (const char *fname = LOCATION_FILE (loc))
7365279f
BK
11739 {
11740 /* The documentation says this builtin is equivalent to the preprocessor
11741 __FILE__ macro so it appears appropriate to use the same file prefix
11742 mappings. */
11743 fname = remap_macro_filename (fname);
b25aad5f 11744 return build_string_literal (strlen (fname) + 1, fname);
7365279f 11745 }
b25aad5f
MS
11746
11747 return build_string_literal (1, "");
11748}
11749
11750/* Fold a call to __builtin_FUNCTION to a constant string. */
11751
11752static inline tree
11753fold_builtin_FUNCTION ()
11754{
f76b4224
NS
11755 const char *name = "";
11756
b25aad5f 11757 if (current_function_decl)
f76b4224 11758 name = lang_hooks.decl_printable_name (current_function_decl, 0);
b25aad5f 11759
f76b4224 11760 return build_string_literal (strlen (name) + 1, name);
b25aad5f
MS
11761}
11762
11763/* Fold a call to __builtin_LINE to an integer constant. */
11764
11765static inline tree
11766fold_builtin_LINE (location_t loc, tree type)
11767{
11768 return build_int_cst (type, LOCATION_LINE (loc));
11769}
11770
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* inf()/huge_val() differ only in whether a warning is wanted for
       modes that lack infinities (second argument of fold_builtin_inf).  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* No argument means "no expression": classify NULL_TREE.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
d3147f64 11809
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   EXPR is the original CALL_EXPR (may be null); it is forwarded to
   fold_builtin_strlen.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant via the generic constant folder.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, expr, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* Complex builtins: only fold when the argument really is a complex
       value with a real-typed component.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* For the classification builtins, first try the constant/range
       based folder, then fall back to the interclass expansion.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free(NULL) is a no-op; fold it to an empty statement.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
b53fed56 11936
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant via the generic constant folder.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    /* EXPR is forwarded to the string folders, which may use it for
       diagnostics about the original call.  */
    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The unordered comparisons are folded via their inverse codes;
       see fold_builtin_unordered_cmp.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
12019
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try folding to a constant via the generic constant folder.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      /* ARG2 is the int* quotient out-parameter.  */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    /* Three-argument __builtin_expect carries a probability: either as
       the trailing NULL_TREE slot (plain expect with predictor arg) or
       as ARG2 for expect_with_probability.  */
    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
b0b3afb2 12093
/* Folds a call EXPR (which may be null) to built-in function FNDECL.
   ARGS is an array of NARGS arguments.  IGNORE is true if the result
   of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
		int nargs, bool)
{
  tree ret = NULL_TREE;

  /* Dispatch to the fixed-arity folder matching NARGS; anything with
     more than three arguments goes through the varargs folder.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      /* The NOP_EXPR wrapper marks the result as a folded builtin and
	 pins the original call's location on it.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}
12131
862d0b35
DN
12132/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12133 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12134 of arguments in ARGS to be omitted. OLDNARGS is the number of
12135 elements in ARGS. */
5039610b
SL
12136
12137static tree
862d0b35
DN
12138rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12139 int skip, tree fndecl, int n, va_list newargs)
5039610b 12140{
862d0b35
DN
12141 int nargs = oldnargs - skip + n;
12142 tree *buffer;
5039610b 12143
862d0b35 12144 if (n > 0)
5039610b 12145 {
862d0b35 12146 int i, j;
5039610b 12147
862d0b35
DN
12148 buffer = XALLOCAVEC (tree, nargs);
12149 for (i = 0; i < n; i++)
12150 buffer[i] = va_arg (newargs, tree);
12151 for (j = skip; j < oldnargs; j++, i++)
12152 buffer[i] = args[j];
12153 }
12154 else
12155 buffer = args + skip;
3bf5906b 12156
862d0b35
DN
12157 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12158}
5039610b 12159
0889e9bc
JJ
12160/* Return true if FNDECL shouldn't be folded right now.
12161 If a built-in function has an inline attribute always_inline
12162 wrapper, defer folding it after always_inline functions have
12163 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12164 might not be performed. */
12165
e7f9dae0 12166bool
0889e9bc
JJ
12167avoid_folding_inline_builtin (tree fndecl)
12168{
12169 return (DECL_DECLARED_INLINE_P (fndecl)
12170 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12171 && cfun
12172 && !cfun->always_inline_functions_inlined
12173 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12174}
12175
6de9cd9a 12176/* A wrapper function for builtin folding that prevents warnings for
caf93cb0 12177 "statement without effect" and the like, caused by removing the
6de9cd9a
DN
12178 call node earlier than the warning is generated. */
12179
12180tree
db3927fb 12181fold_call_expr (location_t loc, tree exp, bool ignore)
6de9cd9a 12182{
5039610b
SL
12183 tree ret = NULL_TREE;
12184 tree fndecl = get_callee_fndecl (exp);
3d78e008 12185 if (fndecl && fndecl_built_in_p (fndecl)
6ef5231b
JJ
12186 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12187 yet. Defer folding until we see all the arguments
12188 (after inlining). */
12189 && !CALL_EXPR_VA_ARG_PACK (exp))
12190 {
12191 int nargs = call_expr_nargs (exp);
12192
12193 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12194 instead last argument is __builtin_va_arg_pack (). Defer folding
12195 even in that case, until arguments are finalized. */
12196 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12197 {
12198 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
3d78e008 12199 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
6ef5231b
JJ
12200 return NULL_TREE;
12201 }
12202
0889e9bc
JJ
12203 if (avoid_folding_inline_builtin (fndecl))
12204 return NULL_TREE;
12205
5039610b 12206 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
f311c3b4
NF
12207 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12208 CALL_EXPR_ARGP (exp), ignore);
5039610b
SL
12209 else
12210 {
a6a0570f 12211 tree *args = CALL_EXPR_ARGP (exp);
b5338fb3 12212 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
5039610b 12213 if (ret)
db3927fb 12214 return ret;
5039610b 12215 }
6de9cd9a 12216 }
5039610b
SL
12217 return NULL_TREE;
12218}
b8698a0f 12219
a6a0570f
RB
12220/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12221 N arguments are passed in the array ARGARRAY. Return a folded
12222 expression or NULL_TREE if no simplification was possible. */
4977bab6
ZW
12223
12224tree
a6a0570f 12225fold_builtin_call_array (location_t loc, tree,
94a0dd7b
SL
12226 tree fn,
12227 int n,
12228 tree *argarray)
6385a28f 12229{
a6a0570f
RB
12230 if (TREE_CODE (fn) != ADDR_EXPR)
12231 return NULL_TREE;
5039610b 12232
a6a0570f
RB
12233 tree fndecl = TREE_OPERAND (fn, 0);
12234 if (TREE_CODE (fndecl) == FUNCTION_DECL
3d78e008 12235 && fndecl_built_in_p (fndecl))
a6a0570f
RB
12236 {
12237 /* If last argument is __builtin_va_arg_pack (), arguments to this
12238 function are not finalized yet. Defer folding until they are. */
12239 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12240 {
12241 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
3d78e008 12242 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
a6a0570f
RB
12243 return NULL_TREE;
12244 }
12245 if (avoid_folding_inline_builtin (fndecl))
12246 return NULL_TREE;
12247 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12248 return targetm.fold_builtin (fndecl, n, argarray, false);
12249 else
b5338fb3 12250 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
a6a0570f 12251 }
5039610b 12252
a6a0570f 12253 return NULL_TREE;
5039610b
SL
12254}
12255
43ea30dc
NF
12256/* Construct a new CALL_EXPR using the tail of the argument list of EXP
12257 along with N new arguments specified as the "..." parameters. SKIP
12258 is the number of arguments in EXP to be omitted. This function is used
12259 to do varargs-to-varargs transformations. */
12260
12261static tree
12262rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12263{
12264 va_list ap;
12265 tree t;
12266
12267 va_start (ap, n);
12268 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12269 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12270 va_end (ap);
5039610b 12271
43ea30dc 12272 return t;
5039610b
SL
12273}
12274
12275/* Validate a single argument ARG against a tree code CODE representing
0dba7960 12276 a type. Return true when argument is valid. */
b8698a0f 12277
5039610b 12278static bool
0dba7960 12279validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
12280{
12281 if (!arg)
12282 return false;
12283 else if (code == POINTER_TYPE)
0dba7960 12284 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
12285 else if (code == INTEGER_TYPE)
12286 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 12287 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 12288}
019fa094 12289
726a989a
RB
12290/* This function validates the types of a function call argument list
12291 against a specified list of tree_codes. If the last specifier is a 0,
12292 that represents an ellipses, otherwise the last specifier must be a
12293 VOID_TYPE.
12294
12295 This is the GIMPLE version of validate_arglist. Eventually we want to
12296 completely convert builtins.c to work from GIMPLEs and the tree based
12297 validate_arglist will then be removed. */
12298
12299bool
538dd0b7 12300validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
12301{
12302 enum tree_code code;
12303 bool res = 0;
12304 va_list ap;
12305 const_tree arg;
12306 size_t i;
12307
12308 va_start (ap, call);
12309 i = 0;
12310
12311 do
12312 {
72b5577d 12313 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
12314 switch (code)
12315 {
12316 case 0:
12317 /* This signifies an ellipses, any further arguments are all ok. */
12318 res = true;
12319 goto end;
12320 case VOID_TYPE:
12321 /* This signifies an endlink, if no arguments remain, return
12322 true, otherwise return false. */
12323 res = (i == gimple_call_num_args (call));
12324 goto end;
12325 default:
12326 /* If no parameters remain or the parameter's code does not
12327 match the specified code, return false. Otherwise continue
12328 checking any remaining arguments. */
12329 arg = gimple_call_arg (call, i++);
12330 if (!validate_arg (arg, code))
12331 goto end;
12332 break;
12333 }
12334 }
12335 while (1);
12336
12337 /* We need gotos here since we can only have one VA_CLOSE in a
12338 function. */
12339 end: ;
12340 va_end (ap);
12341
12342 return res;
12343}
12344
f6155fda
SS
12345/* Default target-specific builtin expander that does nothing. */
12346
12347rtx
4682ae04
AJ
12348default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12349 rtx target ATTRIBUTE_UNUSED,
12350 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 12351 machine_mode mode ATTRIBUTE_UNUSED,
4682ae04 12352 int ignore ATTRIBUTE_UNUSED)
f6155fda
SS
12353{
12354 return NULL_RTX;
12355}
34ee7f82 12356
7dc61d6c
KG
12357/* Returns true is EXP represents data that would potentially reside
12358 in a readonly section. */
12359
fef5a0d9 12360bool
7dc61d6c
KG
12361readonly_data_expr (tree exp)
12362{
12363 STRIP_NOPS (exp);
12364
aef0afc4
UW
12365 if (TREE_CODE (exp) != ADDR_EXPR)
12366 return false;
12367
12368 exp = get_base_address (TREE_OPERAND (exp, 0));
12369 if (!exp)
12370 return false;
12371
12372 /* Make sure we call decl_readonly_section only for trees it
12373 can handle (since it returns true for everything it doesn't
12374 understand). */
caf93cb0 12375 if (TREE_CODE (exp) == STRING_CST
aef0afc4 12376 || TREE_CODE (exp) == CONSTRUCTOR
8813a647 12377 || (VAR_P (exp) && TREE_STATIC (exp)))
aef0afc4 12378 return decl_readonly_section (exp, 0);
7dc61d6c
KG
12379 else
12380 return false;
12381}
6de9cd9a 12382
5039610b
SL
12383/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12384 to the call, and TYPE is its return type.
6de9cd9a 12385
5039610b 12386 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
12387 simplified form of the call as a tree.
12388
12389 The simplified form may be a constant or other expression which
12390 computes the same value, but in a more efficient manner (including
12391 calls to other builtin functions).
12392
12393 The call may contain arguments which need to be evaluated, but
12394 which are not useful to determine the result of the call. In
12395 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12396 COMPOUND_EXPR will be an argument which must be evaluated.
12397 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12398 COMPOUND_EXPR in the chain will contain the tree for the simplified
12399 form of the builtin function call. */
12400
12401static tree
d14c547a 12402fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
6de9cd9a 12403{
5039610b
SL
12404 if (!validate_arg (s1, POINTER_TYPE)
12405 || !validate_arg (s2, POINTER_TYPE))
12406 return NULL_TREE;
6de9cd9a 12407
b5338fb3
MS
12408 tree fn;
12409 const char *p1, *p2;
6de9cd9a 12410
b5338fb3
MS
12411 p2 = c_getstr (s2);
12412 if (p2 == NULL)
12413 return NULL_TREE;
6de9cd9a 12414
b5338fb3
MS
12415 p1 = c_getstr (s1);
12416 if (p1 != NULL)
12417 {
12418 const char *r = strpbrk (p1, p2);
12419 tree tem;
6de9cd9a 12420
b5338fb3
MS
12421 if (r == NULL)
12422 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a 12423
b5338fb3
MS
12424 /* Return an offset into the constant string argument. */
12425 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12426 return fold_convert_loc (loc, type, tem);
12427 }
6de9cd9a 12428
b5338fb3
MS
12429 if (p2[0] == '\0')
12430 /* strpbrk(x, "") == NULL.
12431 Evaluate and ignore s1 in case it had side-effects. */
12432 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
6de9cd9a 12433
b5338fb3
MS
12434 if (p2[1] != '\0')
12435 return NULL_TREE; /* Really call strpbrk. */
12436
12437 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12438 if (!fn)
12439 return NULL_TREE;
12440
12441 /* New argument list transforming strpbrk(s1, s2) to
12442 strchr(s1, s2[0]). */
12443 return build_call_expr_loc (loc, fn, 2, s1,
12444 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
12445}
12446
5039610b
SL
12447/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12448 to the call.
6de9cd9a 12449
5039610b 12450 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
12451 simplified form of the call as a tree.
12452
12453 The simplified form may be a constant or other expression which
12454 computes the same value, but in a more efficient manner (including
12455 calls to other builtin functions).
12456
12457 The call may contain arguments which need to be evaluated, but
12458 which are not useful to determine the result of the call. In
12459 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12460 COMPOUND_EXPR will be an argument which must be evaluated.
12461 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12462 COMPOUND_EXPR in the chain will contain the tree for the simplified
12463 form of the builtin function call. */
12464
12465static tree
b5338fb3 12466fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 12467{
5039610b
SL
12468 if (!validate_arg (s1, POINTER_TYPE)
12469 || !validate_arg (s2, POINTER_TYPE))
12470 return NULL_TREE;
6de9cd9a 12471
b5338fb3
MS
12472 if (!check_nul_terminated_array (expr, s1)
12473 || !check_nul_terminated_array (expr, s2))
12474 return NULL_TREE;
12475
12476 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12477
12478 /* If either argument is "", return NULL_TREE. */
12479 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12480 /* Evaluate and ignore both arguments in case either one has
12481 side-effects. */
12482 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 12483 s1, s2);
b5338fb3 12484 return NULL_TREE;
6de9cd9a
DN
12485}
12486
5039610b
SL
12487/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12488 to the call.
6de9cd9a 12489
5039610b 12490 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
12491 simplified form of the call as a tree.
12492
12493 The simplified form may be a constant or other expression which
12494 computes the same value, but in a more efficient manner (including
12495 calls to other builtin functions).
12496
12497 The call may contain arguments which need to be evaluated, but
12498 which are not useful to determine the result of the call. In
12499 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12500 COMPOUND_EXPR will be an argument which must be evaluated.
12501 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12502 COMPOUND_EXPR in the chain will contain the tree for the simplified
12503 form of the builtin function call. */
12504
12505static tree
b5338fb3 12506fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
6de9cd9a 12507{
5039610b
SL
12508 if (!validate_arg (s1, POINTER_TYPE)
12509 || !validate_arg (s2, POINTER_TYPE))
12510 return NULL_TREE;
b5338fb3
MS
12511
12512 if (!check_nul_terminated_array (expr, s1)
12513 || !check_nul_terminated_array (expr, s2))
12514 return NULL_TREE;
12515
12516 /* If the first argument is "", return NULL_TREE. */
12517 const char *p1 = c_getstr (s1);
12518 if (p1 && *p1 == '\0')
6de9cd9a 12519 {
b5338fb3
MS
12520 /* Evaluate and ignore argument s2 in case it has
12521 side-effects. */
12522 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 12523 size_zero_node, s2);
b5338fb3 12524 }
6de9cd9a 12525
b5338fb3
MS
12526 /* If the second argument is "", return __builtin_strlen(s1). */
12527 const char *p2 = c_getstr (s2);
12528 if (p2 && *p2 == '\0')
12529 {
12530 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a 12531
b5338fb3
MS
12532 /* If the replacement _DECL isn't initialized, don't do the
12533 transformation. */
12534 if (!fn)
12535 return NULL_TREE;
6de9cd9a 12536
b5338fb3 12537 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 12538 }
b5338fb3 12539 return NULL_TREE;
6de9cd9a
DN
12540}
12541
5039610b 12542/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
12543 produced. False otherwise. This is done so that we don't output the error
12544 or warning twice or three times. */
726a989a 12545
2efcfa4e 12546bool
5039610b 12547fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
12548{
12549 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
12550 int nargs = call_expr_nargs (exp);
12551 tree arg;
34c88790
DS
12552 /* There is good chance the current input_location points inside the
12553 definition of the va_start macro (perhaps on the token for
12554 builtin) in a system header, so warnings will not be emitted.
12555 Use the location in real source code. */
620e594b 12556 location_t current_location =
34c88790
DS
12557 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12558 NULL);
6de9cd9a 12559
f38958e8 12560 if (!stdarg_p (fntype))
2efcfa4e 12561 {
a9c697b8 12562 error ("%<va_start%> used in function with fixed arguments");
2efcfa4e
AP
12563 return true;
12564 }
5039610b
SL
12565
12566 if (va_start_p)
8870e212 12567 {
5039610b
SL
12568 if (va_start_p && (nargs != 2))
12569 {
12570 error ("wrong number of arguments to function %<va_start%>");
12571 return true;
12572 }
12573 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
12574 }
12575 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12576 when we checked the arguments and if needed issued a warning. */
5039610b 12577 else
6de9cd9a 12578 {
5039610b
SL
12579 if (nargs == 0)
12580 {
12581 /* Evidently an out of date version of <stdarg.h>; can't validate
12582 va_start's second argument, but can still work as intended. */
34c88790 12583 warning_at (current_location,
b9c8da34
DS
12584 OPT_Wvarargs,
12585 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
12586 return true;
12587 }
12588 else if (nargs > 1)
c22cacf3 12589 {
5039610b 12590 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
12591 return true;
12592 }
5039610b
SL
12593 arg = CALL_EXPR_ARG (exp, 0);
12594 }
12595
4e3825db
MM
12596 if (TREE_CODE (arg) == SSA_NAME)
12597 arg = SSA_NAME_VAR (arg);
12598
5039610b 12599 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 12600 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
12601 the arguments and if needed issuing a warning. */
12602 if (!integer_zerop (arg))
12603 {
12604 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 12605
6de9cd9a
DN
12606 /* Strip off all nops for the sake of the comparison. This
12607 is not quite the same as STRIP_NOPS. It does more.
12608 We must also strip off INDIRECT_EXPR for C++ reference
12609 parameters. */
1043771b 12610 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
12611 || TREE_CODE (arg) == INDIRECT_REF)
12612 arg = TREE_OPERAND (arg, 0);
12613 if (arg != last_parm)
c22cacf3 12614 {
118f3b19
KH
12615 /* FIXME: Sometimes with the tree optimizers we can get the
12616 not the last argument even though the user used the last
12617 argument. We just warn and set the arg to be the last
12618 argument so that we will get wrong-code because of
12619 it. */
34c88790 12620 warning_at (current_location,
b9c8da34 12621 OPT_Wvarargs,
34c88790 12622 "second parameter of %<va_start%> not last named argument");
2efcfa4e 12623 }
2985f531
MLI
12624
12625 /* Undefined by C99 7.15.1.4p4 (va_start):
12626 "If the parameter parmN is declared with the register storage
12627 class, with a function or array type, or with a type that is
12628 not compatible with the type that results after application of
12629 the default argument promotions, the behavior is undefined."
12630 */
12631 else if (DECL_REGISTER (arg))
34c88790
DS
12632 {
12633 warning_at (current_location,
b9c8da34 12634 OPT_Wvarargs,
9c582551 12635 "undefined behavior when second parameter of "
34c88790
DS
12636 "%<va_start%> is declared with %<register%> storage");
12637 }
2985f531 12638
8870e212 12639 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
12640 optimizers are run and then avoid keeping it in the tree,
12641 as otherwise we could warn even for correct code like:
12642 void foo (int i, ...)
12643 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
12644 if (va_start_p)
12645 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12646 else
12647 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
12648 }
12649 return false;
6de9cd9a
DN
12650}
12651
12652
5039610b 12653/* Expand a call EXP to __builtin_object_size. */
10a0d495 12654
9b2b7279 12655static rtx
10a0d495
JJ
12656expand_builtin_object_size (tree exp)
12657{
12658 tree ost;
12659 int object_size_type;
12660 tree fndecl = get_callee_fndecl (exp);
10a0d495 12661
5039610b 12662 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10a0d495 12663 {
0f2c4a8f 12664 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
c94ed7a1 12665 exp, fndecl);
10a0d495
JJ
12666 expand_builtin_trap ();
12667 return const0_rtx;
12668 }
12669
5039610b 12670 ost = CALL_EXPR_ARG (exp, 1);
10a0d495
JJ
12671 STRIP_NOPS (ost);
12672
12673 if (TREE_CODE (ost) != INTEGER_CST
12674 || tree_int_cst_sgn (ost) < 0
12675 || compare_tree_int (ost, 3) > 0)
12676 {
0f2c4a8f 12677 error ("%Klast argument of %qD is not integer constant between 0 and 3",
c94ed7a1 12678 exp, fndecl);
10a0d495
JJ
12679 expand_builtin_trap ();
12680 return const0_rtx;
12681 }
12682
9439e9a1 12683 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
12684
12685 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12686}
12687
12688/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12689 FCODE is the BUILT_IN_* to use.
5039610b 12690 Return NULL_RTX if we failed; the caller should emit a normal call,
10a0d495
JJ
12691 otherwise try to get the result in TARGET, if convenient (and in
12692 mode MODE if that's convenient). */
12693
12694static rtx
ef4bddc2 12695expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10a0d495
JJ
12696 enum built_in_function fcode)
12697{
5039610b 12698 if (!validate_arglist (exp,
10a0d495
JJ
12699 POINTER_TYPE,
12700 fcode == BUILT_IN_MEMSET_CHK
12701 ? INTEGER_TYPE : POINTER_TYPE,
12702 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5039610b 12703 return NULL_RTX;
10a0d495 12704
cc8bea0a
MS
12705 tree dest = CALL_EXPR_ARG (exp, 0);
12706 tree src = CALL_EXPR_ARG (exp, 1);
12707 tree len = CALL_EXPR_ARG (exp, 2);
12708 tree size = CALL_EXPR_ARG (exp, 3);
10a0d495 12709
d14c547a
MS
12710 /* FIXME: Set access mode to write only for memset et al. */
12711 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12712 /*srcstr=*/NULL_TREE, size, access_read_write);
ee92e7ba
MS
12713
12714 if (!tree_fits_uhwi_p (size))
5039610b 12715 return NULL_RTX;
10a0d495 12716
cc269bb6 12717 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10a0d495 12718 {
ee92e7ba
MS
12719 /* Avoid transforming the checking call to an ordinary one when
12720 an overflow has been detected or when the call couldn't be
12721 validated because the size is not constant. */
12722 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12723 return NULL_RTX;
10a0d495 12724
ee92e7ba 12725 tree fn = NULL_TREE;
10a0d495
JJ
12726 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12727 mem{cpy,pcpy,move,set} is available. */
12728 switch (fcode)
12729 {
12730 case BUILT_IN_MEMCPY_CHK:
e79983f4 12731 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10a0d495
JJ
12732 break;
12733 case BUILT_IN_MEMPCPY_CHK:
e79983f4 12734 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10a0d495
JJ
12735 break;
12736 case BUILT_IN_MEMMOVE_CHK:
e79983f4 12737 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10a0d495
JJ
12738 break;
12739 case BUILT_IN_MEMSET_CHK:
e79983f4 12740 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10a0d495
JJ
12741 break;
12742 default:
12743 break;
12744 }
12745
12746 if (! fn)
5039610b 12747 return NULL_RTX;
10a0d495 12748
aa493694 12749 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
44e10129
MM
12750 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12751 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
12752 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12753 }
12754 else if (fcode == BUILT_IN_MEMSET_CHK)
5039610b 12755 return NULL_RTX;
10a0d495
JJ
12756 else
12757 {
0eb77834 12758 unsigned int dest_align = get_pointer_alignment (dest);
10a0d495
JJ
12759
12760 /* If DEST is not a pointer type, call the normal function. */
12761 if (dest_align == 0)
5039610b 12762 return NULL_RTX;
10a0d495
JJ
12763
12764 /* If SRC and DEST are the same (and not volatile), do nothing. */
12765 if (operand_equal_p (src, dest, 0))
12766 {
12767 tree expr;
12768
12769 if (fcode != BUILT_IN_MEMPCPY_CHK)
12770 {
12771 /* Evaluate and ignore LEN in case it has side-effects. */
12772 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12773 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12774 }
12775
5d49b6a7 12776 expr = fold_build_pointer_plus (dest, len);
10a0d495
JJ
12777 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12778 }
12779
12780 /* __memmove_chk special case. */
12781 if (fcode == BUILT_IN_MEMMOVE_CHK)
12782 {
0eb77834 12783 unsigned int src_align = get_pointer_alignment (src);
10a0d495
JJ
12784
12785 if (src_align == 0)
5039610b 12786 return NULL_RTX;
10a0d495
JJ
12787
12788 /* If src is categorized for a readonly section we can use
12789 normal __memcpy_chk. */
12790 if (readonly_data_expr (src))
12791 {
e79983f4 12792 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10a0d495 12793 if (!fn)
5039610b 12794 return NULL_RTX;
aa493694
JJ
12795 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12796 dest, src, len, size);
44e10129
MM
12797 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12798 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
12799 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12800 }
12801 }
5039610b 12802 return NULL_RTX;
10a0d495
JJ
12803 }
12804}
12805
12806/* Emit warning if a buffer overflow is detected at compile time. */
12807
12808static void
12809maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12810{
ee92e7ba
MS
12811 /* The source string. */
12812 tree srcstr = NULL_TREE;
d14c547a 12813 /* The size of the destination object returned by __builtin_object_size. */
ee92e7ba
MS
12814 tree objsize = NULL_TREE;
12815 /* The string that is being concatenated with (as in __strcat_chk)
12816 or null if it isn't. */
12817 tree catstr = NULL_TREE;
12818 /* The maximum length of the source sequence in a bounded operation
12819 (such as __strncat_chk) or null if the operation isn't bounded
12820 (such as __strcat_chk). */
cc8bea0a 12821 tree maxread = NULL_TREE;
9c1caf50
MS
12822 /* The exact size of the access (such as in __strncpy_chk). */
12823 tree size = NULL_TREE;
d14c547a
MS
12824 /* The access by the function that's checked. Except for snprintf
12825 both writing and reading is checked. */
12826 access_mode mode = access_read_write;
10a0d495
JJ
12827
12828 switch (fcode)
12829 {
12830 case BUILT_IN_STRCPY_CHK:
12831 case BUILT_IN_STPCPY_CHK:
ee92e7ba
MS
12832 srcstr = CALL_EXPR_ARG (exp, 1);
12833 objsize = CALL_EXPR_ARG (exp, 2);
12834 break;
12835
10a0d495 12836 case BUILT_IN_STRCAT_CHK:
ee92e7ba
MS
12837 /* For __strcat_chk the warning will be emitted only if overflowing
12838 by at least strlen (dest) + 1 bytes. */
12839 catstr = CALL_EXPR_ARG (exp, 0);
12840 srcstr = CALL_EXPR_ARG (exp, 1);
12841 objsize = CALL_EXPR_ARG (exp, 2);
10a0d495 12842 break;
ee92e7ba 12843
1c2fc017 12844 case BUILT_IN_STRNCAT_CHK:
ee92e7ba
MS
12845 catstr = CALL_EXPR_ARG (exp, 0);
12846 srcstr = CALL_EXPR_ARG (exp, 1);
cc8bea0a 12847 maxread = CALL_EXPR_ARG (exp, 2);
ee92e7ba
MS
12848 objsize = CALL_EXPR_ARG (exp, 3);
12849 break;
12850
10a0d495 12851 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 12852 case BUILT_IN_STPNCPY_CHK:
ee92e7ba 12853 srcstr = CALL_EXPR_ARG (exp, 1);
9c1caf50 12854 size = CALL_EXPR_ARG (exp, 2);
ee92e7ba 12855 objsize = CALL_EXPR_ARG (exp, 3);
10a0d495 12856 break;
ee92e7ba 12857
10a0d495
JJ
12858 case BUILT_IN_SNPRINTF_CHK:
12859 case BUILT_IN_VSNPRINTF_CHK:
cc8bea0a 12860 maxread = CALL_EXPR_ARG (exp, 1);
ee92e7ba 12861 objsize = CALL_EXPR_ARG (exp, 3);
d14c547a
MS
12862 /* The only checked access the write to the destination. */
12863 mode = access_write_only;
10a0d495
JJ
12864 break;
12865 default:
12866 gcc_unreachable ();
12867 }
12868
cc8bea0a 12869 if (catstr && maxread)
10a0d495 12870 {
ee92e7ba
MS
12871 /* Check __strncat_chk. There is no way to determine the length
12872 of the string to which the source string is being appended so
12873 just warn when the length of the source string is not known. */
d9c5a8b9
MS
12874 check_strncat_sizes (exp, objsize);
12875 return;
10a0d495 12876 }
10a0d495 12877
d14c547a 12878 check_access (exp, size, maxread, srcstr, objsize, mode);
10a0d495
JJ
12879}
12880
12881/* Emit warning if a buffer overflow is detected at compile time
12882 in __sprintf_chk/__vsprintf_chk calls. */
12883
12884static void
12885maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12886{
451409e4 12887 tree size, len, fmt;
10a0d495 12888 const char *fmt_str;
5039610b 12889 int nargs = call_expr_nargs (exp);
10a0d495
JJ
12890
12891 /* Verify the required arguments in the original call. */
b8698a0f 12892
5039610b 12893 if (nargs < 4)
10a0d495 12894 return;
5039610b
SL
12895 size = CALL_EXPR_ARG (exp, 2);
12896 fmt = CALL_EXPR_ARG (exp, 3);
10a0d495 12897
cc269bb6 12898 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10a0d495
JJ
12899 return;
12900
12901 /* Check whether the format is a literal string constant. */
12902 fmt_str = c_getstr (fmt);
12903 if (fmt_str == NULL)
12904 return;
12905
62e5bf5d 12906 if (!init_target_chars ())
000ba23d
KG
12907 return;
12908
10a0d495 12909 /* If the format doesn't contain % args or %%, we know its size. */
000ba23d 12910 if (strchr (fmt_str, target_percent) == 0)
10a0d495
JJ
12911 len = build_int_cstu (size_type_node, strlen (fmt_str));
12912 /* If the format is "%s" and first ... argument is a string literal,
12913 we know it too. */
5039610b
SL
12914 else if (fcode == BUILT_IN_SPRINTF_CHK
12915 && strcmp (fmt_str, target_percent_s) == 0)
10a0d495
JJ
12916 {
12917 tree arg;
12918
5039610b 12919 if (nargs < 5)
10a0d495 12920 return;
5039610b 12921 arg = CALL_EXPR_ARG (exp, 4);
10a0d495
JJ
12922 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12923 return;
12924
12925 len = c_strlen (arg, 1);
cc269bb6 12926 if (!len || ! tree_fits_uhwi_p (len))
10a0d495
JJ
12927 return;
12928 }
12929 else
12930 return;
12931
ee92e7ba
MS
12932 /* Add one for the terminating nul. */
12933 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
cc8bea0a 12934
d14c547a
MS
12935 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
12936 access_write_only);
10a0d495
JJ
12937}
12938
dce6c58d
MS
12939/* Return true if STMT is a call to an allocation function. Unless
12940 ALL_ALLOC is set, consider only functions that return dynmamically
12941 allocated objects. Otherwise return true even for all forms of
12942 alloca (including VLA). */
f9555f40 12943
dce6c58d
MS
12944static bool
12945fndecl_alloc_p (tree fndecl, bool all_alloc)
12946{
12947 if (!fndecl)
12948 return false;
12949
12950 /* A call to operator new isn't recognized as one to a built-in. */
12951 if (DECL_IS_OPERATOR_NEW_P (fndecl))
12952 return true;
12953
12954 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
12955 {
12956 switch (DECL_FUNCTION_CODE (fndecl))
12957 {
12958 case BUILT_IN_ALLOCA:
12959 case BUILT_IN_ALLOCA_WITH_ALIGN:
12960 return all_alloc;
fe7f75cf 12961 case BUILT_IN_ALIGNED_ALLOC:
dce6c58d 12962 case BUILT_IN_CALLOC:
fe7f75cf 12963 case BUILT_IN_GOMP_ALLOC:
dce6c58d
MS
12964 case BUILT_IN_MALLOC:
12965 case BUILT_IN_REALLOC:
12966 case BUILT_IN_STRDUP:
12967 case BUILT_IN_STRNDUP:
12968 return true;
12969 default:
12970 break;
12971 }
12972 }
12973
12974 /* A function is considered an allocation function if it's declared
12975 with attribute malloc with an argument naming its associated
12976 deallocation function. */
12977 tree attrs = DECL_ATTRIBUTES (fndecl);
12978 if (!attrs)
12979 return false;
12980
12981 for (tree allocs = attrs;
12982 (allocs = lookup_attribute ("malloc", allocs));
12983 allocs = TREE_CHAIN (allocs))
12984 {
12985 tree args = TREE_VALUE (allocs);
12986 if (!args)
12987 continue;
12988
12989 if (TREE_VALUE (args))
12990 return true;
12991 }
12992
12993 return false;
12994}
12995
12996/* Return true if STMT is a call to an allocation function. A wrapper
12997 around fndecl_alloc_p. */
12998
12999static bool
13000gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13001{
13002 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13003}
13004
13005/* Return the zero-based number corresponding to the argument being
13006 deallocated if STMT is a call to a deallocation function or UINT_MAX
13007 if it isn't. */
13008
13009static unsigned
13010call_dealloc_argno (tree exp)
13011{
13012 tree fndecl = get_callee_fndecl (exp);
13013 if (!fndecl)
13014 return UINT_MAX;
13015
13016 /* A call to operator delete isn't recognized as one to a built-in. */
13017 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13018 return 0;
13019
13020 /* TODO: Handle user-defined functions with attribute malloc? Handle
13021 known non-built-ins like fopen? */
13022 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13023 {
13024 switch (DECL_FUNCTION_CODE (fndecl))
13025 {
13026 case BUILT_IN_FREE:
13027 case BUILT_IN_REALLOC:
13028 return 0;
13029 default:
13030 break;
13031 }
13032 return UINT_MAX;
13033 }
13034
13035 tree attrs = DECL_ATTRIBUTES (fndecl);
13036 if (!attrs)
13037 return UINT_MAX;
13038
13039 for (tree atfree = attrs;
13040 (atfree = lookup_attribute ("*dealloc", atfree));
13041 atfree = TREE_CHAIN (atfree))
13042 {
13043 tree alloc = TREE_VALUE (atfree);
13044 if (!alloc)
13045 continue;
13046
13047 tree pos = TREE_CHAIN (alloc);
13048 if (!pos)
13049 return 0;
13050
13051 pos = TREE_VALUE (pos);
13052 return TREE_INT_CST_LOW (pos) - 1;
13053 }
13054
13055 return UINT_MAX;
13056}
13057
fe7f75cf
MS
13058/* Return true if DELETE_DECL is an operator delete that's not suitable
13059 to call with a pointer returned fron NEW_DECL. */
dce6c58d 13060
fe7f75cf
MS
13061static bool
13062new_delete_mismatch_p (tree new_decl, tree delete_decl)
dce6c58d 13063{
fe7f75cf
MS
13064 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13065 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13066
13067 /* valid_new_delete_pair_p() returns a conservative result. A true
13068 result is reliable but a false result doesn't necessarily mean
13069 the operators don't match. */
13070 if (valid_new_delete_pair_p (new_name, delete_name))
13071 return false;
13072
13073 const char *new_str = IDENTIFIER_POINTER (new_name);
13074 const char *del_str = IDENTIFIER_POINTER (delete_name);
13075
13076 if (*new_str != '_')
13077 return *new_str != *del_str;
13078
13079 ++del_str;
13080 if (*++new_str != 'Z')
13081 return *new_str != *del_str;
13082
13083 ++del_str;
13084 if (*++new_str == 'n')
13085 return *del_str != 'd';
13086
13087 if (*new_str != 'N')
13088 return *del_str != 'N';
13089
13090 /* Handle user-defined member operators below. */
13091 ++new_str;
13092 ++del_str;
13093
13094 do
13095 {
13096 /* Determine if both operators are members of the same type.
13097 If not, they don't match. */
13098 char *new_end, *del_end;
13099 unsigned long nlen = strtoul (new_str, &new_end, 10);
13100 unsigned long dlen = strtoul (del_str, &del_end, 10);
13101 if (nlen != dlen)
13102 return true;
13103
13104 /* Skip past the name length. */
13105 new_str = new_end;
13106 del_str = del_end;
13107
13108 /* Skip past the names making sure each has the expected length
13109 (it would suggest some sort of a corruption if they didn't). */
13110 while (nlen--)
13111 if (!*++new_end)
13112 return true;
13113
13114 for (nlen = dlen; nlen--; )
13115 if (!*++del_end)
13116 return true;
13117
13118 /* The names have the expected length. Compare them. */
13119 if (memcmp (new_str, del_str, dlen))
13120 return true;
13121
13122 new_str = new_end;
13123 del_str = del_end;
13124
13125 if (*new_str == 'I')
13126 {
13127 /* Template instantiation. */
13128 do
13129 {
13130 ++new_str;
13131 ++del_str;
13132
13133 if (*new_str == 'n')
13134 break;
13135 if (*new_str != *del_str)
13136 return true;
13137 }
13138 while (*new_str);
13139 }
13140
13141 if (*new_str == 'n')
13142 {
13143 if (*del_str != 'd')
13144 return true;
13145
13146 ++del_str;
13147 if (*++new_str == 'w' && *del_str != 'l')
13148 return true;
13149 if (*new_str == 'a' && *del_str != 'a')
13150 return true;
13151 ++new_str;
13152 ++del_str;
13153 break;
13154 }
13155 } while (true);
13156
13157 if (*new_str != 'E')
13158 return *del_str != *new_str;
13159
13160 ++new_str;
13161 ++del_str;
13162 if (*new_str != 'j' && *new_str != 'm' && *new_str != 'y')
13163 return true;
13164 if (*del_str != 'P' || *++del_str != 'v')
13165 return true;
13166
13167 /* Ignore any remaining arguments. Since both operators are members
13168 of the same class, mismatches in those should be detectable and
13169 diagnosed by the front end. */
13170 return false;
dce6c58d
MS
13171}
13172
13173/* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation
13174 functions. Return true if the latter is suitable to deallocate objects
13175 allocated by calls to the former. */
13176
13177static bool
13178matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13179{
fe7f75cf
MS
13180 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13181 a built-in deallocator. */
13182 enum class alloc_kind_t { none, builtin, user }
13183 alloc_dealloc_kind = alloc_kind_t::none;
13184
dce6c58d
MS
13185 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13186 {
13187 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
fe7f75cf
MS
13188 /* Return true iff both functions are of the same array or
13189 singleton form and false otherwise. */
13190 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
dce6c58d
MS
13191
13192 /* Return false for deallocation functions that are known not
13193 to match. */
13194 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13195 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13196 return false;
13197 /* Otherwise proceed below to check the deallocation function's
13198 "*dealloc" attributes to look for one that mentions this operator
13199 new. */
13200 }
13201 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13202 {
13203 switch (DECL_FUNCTION_CODE (alloc_decl))
13204 {
13205 case BUILT_IN_ALLOCA:
13206 case BUILT_IN_ALLOCA_WITH_ALIGN:
13207 return false;
13208
fe7f75cf 13209 case BUILT_IN_ALIGNED_ALLOC:
dce6c58d 13210 case BUILT_IN_CALLOC:
fe7f75cf 13211 case BUILT_IN_GOMP_ALLOC:
dce6c58d
MS
13212 case BUILT_IN_MALLOC:
13213 case BUILT_IN_REALLOC:
13214 case BUILT_IN_STRDUP:
13215 case BUILT_IN_STRNDUP:
13216 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13217 return false;
13218
13219 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13220 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13221 return true;
fe7f75cf
MS
13222
13223 alloc_dealloc_kind = alloc_kind_t::builtin;
dce6c58d
MS
13224 break;
13225
13226 default:
13227 break;
13228 }
13229 }
13230
fe7f75cf
MS
13231 /* Set if DEALLOC_DECL both allocates and deallocates. */
13232 alloc_kind_t realloc_kind = alloc_kind_t::none;
13233
13234 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13235 {
13236 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13237 if (dealloc_code == BUILT_IN_REALLOC)
13238 realloc_kind = alloc_kind_t::builtin;
13239
13240 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13241 (amats = lookup_attribute ("malloc", amats));
13242 amats = TREE_CHAIN (amats))
13243 {
13244 tree args = TREE_VALUE (amats);
13245 if (!args)
13246 continue;
13247
13248 tree fndecl = TREE_VALUE (args);
13249 if (!fndecl || !DECL_P (fndecl))
13250 continue;
13251
13252 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13253 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13254 return true;
13255 }
13256 }
13257
13258 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13259 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
dce6c58d 13260
fe7f75cf
MS
13261 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13262 of its associated allocation functions for ALLOC_DECL.
13263 If the corresponding ALLOC_DECL is found they're a matching pair,
13264 otherwise they're not.
13265 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13266 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13267 (ddats = lookup_attribute ("*dealloc", ddats));
13268 ddats = TREE_CHAIN (ddats))
dce6c58d 13269 {
fe7f75cf 13270 tree args = TREE_VALUE (ddats);
dce6c58d
MS
13271 if (!args)
13272 continue;
13273
fe7f75cf
MS
13274 tree alloc = TREE_VALUE (args);
13275 if (!alloc)
dce6c58d
MS
13276 continue;
13277
fe7f75cf
MS
13278 if (alloc == DECL_NAME (dealloc_decl))
13279 realloc_kind = alloc_kind_t::user;
13280
13281 if (DECL_P (alloc))
13282 {
13283 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13284
13285 switch (DECL_FUNCTION_CODE (alloc))
13286 {
13287 case BUILT_IN_ALIGNED_ALLOC:
13288 case BUILT_IN_CALLOC:
13289 case BUILT_IN_GOMP_ALLOC:
13290 case BUILT_IN_MALLOC:
13291 case BUILT_IN_REALLOC:
13292 case BUILT_IN_STRDUP:
13293 case BUILT_IN_STRNDUP:
13294 realloc_dealloc_kind = alloc_kind_t::builtin;
13295 break;
13296 default:
13297 break;
13298 }
13299
13300 if (!alloc_builtin)
13301 continue;
13302
13303 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13304 continue;
13305
13306 return true;
13307 }
13308
13309 if (alloc == DECL_NAME (alloc_decl))
dce6c58d
MS
13310 return true;
13311 }
13312
fe7f75cf
MS
13313 if (realloc_kind == alloc_kind_t::none)
13314 return false;
13315
13316 hash_set<tree> common_deallocs;
13317 /* Special handling for deallocators. Iterate over both the allocator's
13318 and the reallocator's associated deallocator functions looking for
13319 the first one in common. If one is found, the de/reallocator is
13320 a match for the allocator even though the latter isn't directly
13321 associated with the former. This simplifies declarations in system
13322 headers.
13323 With AMATS set to the Allocator's Malloc ATtributes,
13324 and RMATS set to Reallocator's Malloc ATtributes... */
13325 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13326 rmats = DECL_ATTRIBUTES (dealloc_decl);
13327 (amats = lookup_attribute ("malloc", amats))
13328 || (rmats = lookup_attribute ("malloc", rmats));
13329 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13330 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13331 {
13332 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13333 if (tree adealloc = TREE_VALUE (args))
13334 {
13335 if (DECL_P (adealloc)
13336 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13337 {
13338 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13339 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13340 {
13341 if (realloc_kind == alloc_kind_t::builtin)
13342 return true;
13343 alloc_dealloc_kind = alloc_kind_t::builtin;
13344 }
13345 continue;
13346 }
13347
13348 common_deallocs.add (adealloc);
13349 }
13350
13351 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13352 if (tree ddealloc = TREE_VALUE (args))
13353 {
13354 if (DECL_P (ddealloc)
13355 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13356 {
13357 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13358 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13359 {
13360 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13361 return true;
13362 realloc_dealloc_kind = alloc_kind_t::builtin;
13363 }
13364 continue;
13365 }
13366
13367 if (common_deallocs.add (ddealloc))
13368 return true;
13369 }
13370 }
13371
13372 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13373 a built-in deallocator. */
13374 return (alloc_dealloc_kind == alloc_kind_t::builtin
13375 && realloc_dealloc_kind == alloc_kind_t::builtin);
dce6c58d
MS
13376}
13377
13378/* Return true if DEALLOC_DECL is a function suitable to deallocate
13379 objectes allocated by the ALLOC call. */
13380
13381static bool
13382matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13383{
13384 tree alloc_decl = gimple_call_fndecl (alloc);
13385 if (!alloc_decl)
13386 return true;
13387
13388 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13389}
13390
fe7f75cf
MS
13391/* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13392 includes a nonzero offset. Such a pointer cannot refer to the beginning
13393 of an allocated object. A negative offset may refer to it only if
13394 the target pointer is unknown. */
dce6c58d
MS
13395
13396static bool
fe7f75cf 13397warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
dce6c58d 13398{
fe7f75cf
MS
13399 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13400 return false;
13401
13402 tree dealloc_decl = get_callee_fndecl (exp);
0df31165
MS
13403 if (!dealloc_decl)
13404 return false;
fdd8560c 13405
fe7f75cf
MS
13406 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13407 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13408 {
13409 /* A call to a user-defined operator delete with a pointer plus offset
13410 may be valid if it's returned from an unknown function (i.e., one
13411 that's not operator new). */
13412 if (TREE_CODE (aref.ref) == SSA_NAME)
13413 {
13414 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13415 if (is_gimple_call (def_stmt))
13416 {
13417 tree alloc_decl = gimple_call_fndecl (def_stmt);
0df31165 13418 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
fe7f75cf
MS
13419 return false;
13420 }
13421 }
13422 }
13423
dce6c58d
MS
13424 char offstr[80];
13425 offstr[0] = '\0';
13426 if (wi::fits_shwi_p (aref.offrng[0]))
13427 {
13428 if (aref.offrng[0] == aref.offrng[1]
13429 || !wi::fits_shwi_p (aref.offrng[1]))
13430 sprintf (offstr, " %lli",
13431 (long long)aref.offrng[0].to_shwi ());
13432 else
13433 sprintf (offstr, " [%lli, %lli]",
13434 (long long)aref.offrng[0].to_shwi (),
13435 (long long)aref.offrng[1].to_shwi ());
13436 }
13437
13438 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13439 "%K%qD called on pointer %qE with nonzero offset%s",
fe7f75cf 13440 exp, dealloc_decl, aref.ref, offstr))
dce6c58d
MS
13441 return false;
13442
13443 if (DECL_P (aref.ref))
13444 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13445 else if (TREE_CODE (aref.ref) == SSA_NAME)
13446 {
13447 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13448 if (is_gimple_call (def_stmt))
13449 {
fe7f75cf 13450 location_t def_loc = gimple_location (def_stmt);
dce6c58d 13451 tree alloc_decl = gimple_call_fndecl (def_stmt);
fe7f75cf
MS
13452 if (alloc_decl)
13453 inform (def_loc,
13454 "returned from %qD", alloc_decl);
13455 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13456 inform (def_loc,
13457 "returned from %qT", alloc_fntype);
13458 else
13459 inform (def_loc, "obtained here");
dce6c58d
MS
13460 }
13461 }
13462
13463 return true;
13464}
13465
13466/* Issue a warning if a deallocation function such as free, realloc,
13467 or C++ operator delete is called with an argument not returned by
13468 a matching allocation function such as malloc or the corresponding
13469 form of C++ operatorn new. */
13470
13471void
f9555f40
JJ
13472maybe_emit_free_warning (tree exp)
13473{
dce6c58d
MS
13474 tree fndecl = get_callee_fndecl (exp);
13475 if (!fndecl)
9616781d
JJ
13476 return;
13477
dce6c58d
MS
13478 unsigned argno = call_dealloc_argno (exp);
13479 if ((unsigned) call_expr_nargs (exp) <= argno)
13480 return;
f9555f40 13481
dce6c58d
MS
13482 tree ptr = CALL_EXPR_ARG (exp, argno);
13483 if (integer_zerop (ptr))
f9555f40
JJ
13484 return;
13485
dce6c58d
MS
13486 access_ref aref;
13487 if (!compute_objsize (ptr, 0, &aref))
f9555f40
JJ
13488 return;
13489
dce6c58d
MS
13490 tree ref = aref.ref;
13491 if (integer_zerop (ref))
13492 return;
13493
13494 tree dealloc_decl = get_callee_fndecl (exp);
fe7f75cf 13495 location_t loc = tree_inlined_location (exp);
dce6c58d
MS
13496
13497 if (DECL_P (ref) || EXPR_P (ref))
13498 {
13499 /* Diagnose freeing a declared object. */
13500 if (aref.ref_declared ()
13501 && warning_at (loc, OPT_Wfree_nonheap_object,
13502 "%K%qD called on unallocated object %qD",
13503 exp, dealloc_decl, ref))
13504 {
fe7f75cf
MS
13505 loc = (DECL_P (ref)
13506 ? DECL_SOURCE_LOCATION (ref)
13507 : EXPR_LOCATION (ref));
13508 inform (loc, "declared here");
dce6c58d
MS
13509 return;
13510 }
13511
13512 /* Diagnose freeing a pointer that includes a positive offset.
13513 Such a pointer cannot refer to the beginning of an allocated
13514 object. A negative offset may refer to it. */
fe7f75cf
MS
13515 if (aref.sizrng[0] != aref.sizrng[1]
13516 && warn_dealloc_offset (loc, exp, aref))
dce6c58d
MS
13517 return;
13518 }
13519 else if (CONSTANT_CLASS_P (ref))
13520 {
13521 if (warning_at (loc, OPT_Wfree_nonheap_object,
13522 "%K%qD called on a pointer to an unallocated "
13523 "object %qE", exp, dealloc_decl, ref))
13524 {
13525 if (TREE_CODE (ptr) == SSA_NAME)
13526 {
13527 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13528 if (is_gimple_assign (def_stmt))
13529 {
13530 location_t loc = gimple_location (def_stmt);
13531 inform (loc, "assigned here");
13532 }
13533 }
13534 return;
13535 }
13536 }
13537 else if (TREE_CODE (ref) == SSA_NAME)
13538 {
13539 /* Also warn if the pointer argument refers to the result
13540 of an allocation call like alloca or VLA. */
13541 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13542 if (is_gimple_call (def_stmt))
13543 {
13544 bool warned = false;
13545 if (gimple_call_alloc_p (def_stmt))
13546 {
13547 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13548 {
fe7f75cf 13549 if (warn_dealloc_offset (loc, exp, aref))
dce6c58d
MS
13550 return;
13551 }
13552 else
13553 {
13554 tree alloc_decl = gimple_call_fndecl (def_stmt);
13555 int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13556 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13557 ? OPT_Wmismatched_new_delete
13558 : OPT_Wmismatched_dealloc);
13559 warned = warning_at (loc, opt,
13560 "%K%qD called on pointer returned "
13561 "from a mismatched allocation "
13562 "function", exp, dealloc_decl);
13563 }
13564 }
13565 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13566 || gimple_call_builtin_p (def_stmt,
13567 BUILT_IN_ALLOCA_WITH_ALIGN))
13568 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13569 "%K%qD called on pointer to "
13570 "an unallocated object",
13571 exp, dealloc_decl);
fe7f75cf 13572 else if (warn_dealloc_offset (loc, exp, aref))
dce6c58d
MS
13573 return;
13574
13575 if (warned)
13576 {
13577 tree fndecl = gimple_call_fndecl (def_stmt);
13578 inform (gimple_location (def_stmt),
fe7f75cf 13579 "returned from %qD", fndecl);
dce6c58d
MS
13580 return;
13581 }
13582 }
13583 else if (gimple_nop_p (def_stmt))
13584 {
13585 ref = SSA_NAME_VAR (ref);
13586 /* Diagnose freeing a pointer that includes a positive offset. */
13587 if (TREE_CODE (ref) == PARM_DECL
13588 && !aref.deref
13589 && aref.sizrng[0] != aref.sizrng[1]
13590 && aref.offrng[0] > 0 && aref.offrng[1] > 0
fe7f75cf 13591 && warn_dealloc_offset (loc, exp, aref))
dce6c58d
MS
13592 return;
13593 }
13594 }
f9555f40
JJ
13595}
13596
5039610b
SL
13597/* Fold a call to __builtin_object_size with arguments PTR and OST,
13598 if possible. */
10a0d495 13599
9b2b7279 13600static tree
5039610b 13601fold_builtin_object_size (tree ptr, tree ost)
10a0d495 13602{
88e06841 13603 unsigned HOST_WIDE_INT bytes;
10a0d495
JJ
13604 int object_size_type;
13605
5039610b
SL
13606 if (!validate_arg (ptr, POINTER_TYPE)
13607 || !validate_arg (ost, INTEGER_TYPE))
13608 return NULL_TREE;
10a0d495 13609
10a0d495
JJ
13610 STRIP_NOPS (ost);
13611
13612 if (TREE_CODE (ost) != INTEGER_CST
13613 || tree_int_cst_sgn (ost) < 0
13614 || compare_tree_int (ost, 3) > 0)
5039610b 13615 return NULL_TREE;
10a0d495 13616
9439e9a1 13617 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
13618
13619 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
13620 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
13621 and (size_t) 0 for types 2 and 3. */
13622 if (TREE_SIDE_EFFECTS (ptr))
2ac7cbb5 13623 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10a0d495
JJ
13624
13625 if (TREE_CODE (ptr) == ADDR_EXPR)
88e06841 13626 {
05a64756 13627 compute_builtin_object_size (ptr, object_size_type, &bytes);
807e902e 13628 if (wi::fits_to_tree_p (bytes, size_type_node))
88e06841
AS
13629 return build_int_cstu (size_type_node, bytes);
13630 }
10a0d495
JJ
13631 else if (TREE_CODE (ptr) == SSA_NAME)
13632 {
10a0d495
JJ
13633 /* If object size is not known yet, delay folding until
13634 later. Maybe subsequent passes will help determining
13635 it. */
05a64756
MS
13636 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
13637 && wi::fits_to_tree_p (bytes, size_type_node))
88e06841 13638 return build_int_cstu (size_type_node, bytes);
10a0d495
JJ
13639 }
13640
88e06841 13641 return NULL_TREE;
10a0d495
JJ
13642}
13643
903c723b
TC
13644/* Builtins with folding operations that operate on "..." arguments
13645 need special handling; we need to store the arguments in a convenient
13646 data structure before attempting any folding. Fortunately there are
13647 only a few builtins that fall into this category. FNDECL is the
13648 function, EXP is the CALL_EXPR for the call. */
13649
13650static tree
13651fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
13652{
13653 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13654 tree ret = NULL_TREE;
13655
13656 switch (fcode)
13657 {
13658 case BUILT_IN_FPCLASSIFY:
13659 ret = fold_builtin_fpclassify (loc, args, nargs);
13660 break;
13661
13662 default:
13663 break;
13664 }
13665 if (ret)
13666 {
13667 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13668 SET_EXPR_LOCATION (ret, loc);
13669 TREE_NO_WARNING (ret) = 1;
13670 return ret;
13671 }
13672 return NULL_TREE;
13673}
13674
000ba23d
KG
13675/* Initialize format string characters in the target charset. */
13676
fef5a0d9 13677bool
000ba23d
KG
13678init_target_chars (void)
13679{
13680 static bool init;
13681 if (!init)
13682 {
13683 target_newline = lang_hooks.to_target_charset ('\n');
13684 target_percent = lang_hooks.to_target_charset ('%');
13685 target_c = lang_hooks.to_target_charset ('c');
13686 target_s = lang_hooks.to_target_charset ('s');
13687 if (target_newline == 0 || target_percent == 0 || target_c == 0
13688 || target_s == 0)
13689 return false;
13690
13691 target_percent_c[0] = target_percent;
13692 target_percent_c[1] = target_c;
13693 target_percent_c[2] = '\0';
13694
13695 target_percent_s[0] = target_percent;
13696 target_percent_s[1] = target_s;
13697 target_percent_s[2] = '\0';
13698
13699 target_percent_s_newline[0] = target_percent;
13700 target_percent_s_newline[1] = target_s;
13701 target_percent_s_newline[2] = target_newline;
13702 target_percent_s_newline[3] = '\0';
c22cacf3 13703
000ba23d
KG
13704 init = true;
13705 }
13706 return true;
13707}
1f3f1f68 13708
4413d881
KG
13709/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13710 and no overflow/underflow occurred. INEXACT is true if M was not
2f8e468b 13711 exactly calculated. TYPE is the tree type for the result. This
4413d881
KG
13712 function assumes that you cleared the MPFR flags and then
13713 calculated M to see if anything subsequently set a flag prior to
13714 entering this function. Return NULL_TREE if any checks fail. */
13715
13716static tree
62e5bf5d 13717do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
4413d881
KG
13718{
13719 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13720 overflow/underflow occurred. If -frounding-math, proceed iff the
13721 result of calling FUNC was exact. */
62e5bf5d 13722 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
4413d881
KG
13723 && (!flag_rounding_math || !inexact))
13724 {
13725 REAL_VALUE_TYPE rr;
13726
90ca6847 13727 real_from_mpfr (&rr, m, type, MPFR_RNDN);
4413d881
KG
13728 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13729 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13730 but the mpft_t is not, then we underflowed in the
13731 conversion. */
4c8c70e0 13732 if (real_isfinite (&rr)
4413d881
KG
13733 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13734 {
13735 REAL_VALUE_TYPE rmode;
13736
13737 real_convert (&rmode, TYPE_MODE (type), &rr);
13738 /* Proceed iff the specified mode can hold the value. */
13739 if (real_identical (&rmode, &rr))
13740 return build_real (type, rmode);
13741 }
13742 }
13743 return NULL_TREE;
13744}
13745
c128599a
KG
13746/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13747 number and no overflow/underflow occurred. INEXACT is true if M
13748 was not exactly calculated. TYPE is the tree type for the result.
13749 This function assumes that you cleared the MPFR flags and then
13750 calculated M to see if anything subsequently set a flag prior to
ca75b926
KG
13751 entering this function. Return NULL_TREE if any checks fail, if
13752 FORCE_CONVERT is true, then bypass the checks. */
c128599a
KG
13753
13754static tree
ca75b926 13755do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
c128599a
KG
13756{
13757 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13758 overflow/underflow occurred. If -frounding-math, proceed iff the
13759 result of calling FUNC was exact. */
ca75b926
KG
13760 if (force_convert
13761 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13762 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13763 && (!flag_rounding_math || !inexact)))
c128599a
KG
13764 {
13765 REAL_VALUE_TYPE re, im;
13766
90ca6847
TB
13767 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
13768 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
c128599a
KG
13769 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13770 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13771 but the mpft_t is not, then we underflowed in the
13772 conversion. */
ca75b926
KG
13773 if (force_convert
13774 || (real_isfinite (&re) && real_isfinite (&im)
13775 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13776 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
c128599a
KG
13777 {
13778 REAL_VALUE_TYPE re_mode, im_mode;
13779
13780 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13781 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13782 /* Proceed iff the specified mode can hold the value. */
ca75b926
KG
13783 if (force_convert
13784 || (real_identical (&re_mode, &re)
13785 && real_identical (&im_mode, &im)))
c128599a
KG
13786 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13787 build_real (TREE_TYPE (type), im_mode));
13788 }
13789 }
13790 return NULL_TREE;
13791}
c128599a 13792
ea91f957
KG
13793/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13794 the pointer *(ARG_QUO) and return the result. The type is taken
13795 from the type of ARG0 and is used for setting the precision of the
13796 calculation and results. */
13797
13798static tree
13799do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13800{
13801 tree const type = TREE_TYPE (arg0);
13802 tree result = NULL_TREE;
b8698a0f 13803
ea91f957
KG
13804 STRIP_NOPS (arg0);
13805 STRIP_NOPS (arg1);
b8698a0f 13806
ea91f957
KG
13807 /* To proceed, MPFR must exactly represent the target floating point
13808 format, which only happens when the target base equals two. */
13809 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13810 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13811 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13812 {
13813 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13814 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13815
4c8c70e0 13816 if (real_isfinite (ra0) && real_isfinite (ra1))
ea91f957 13817 {
3e479de3
UW
13818 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13819 const int prec = fmt->p;
90ca6847 13820 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
ea91f957
KG
13821 tree result_rem;
13822 long integer_quo;
13823 mpfr_t m0, m1;
13824
13825 mpfr_inits2 (prec, m0, m1, NULL);
90ca6847
TB
13826 mpfr_from_real (m0, ra0, MPFR_RNDN);
13827 mpfr_from_real (m1, ra1, MPFR_RNDN);
ea91f957 13828 mpfr_clear_flags ();
3e479de3 13829 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
ea91f957
KG
13830 /* Remquo is independent of the rounding mode, so pass
13831 inexact=0 to do_mpfr_ckconv(). */
13832 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13833 mpfr_clears (m0, m1, NULL);
13834 if (result_rem)
13835 {
13836 /* MPFR calculates quo in the host's long so it may
13837 return more bits in quo than the target int can hold
13838 if sizeof(host long) > sizeof(target int). This can
13839 happen even for native compilers in LP64 mode. In
13840 these cases, modulo the quo value with the largest
13841 number that the target int can hold while leaving one
13842 bit for the sign. */
13843 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13844 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13845
13846 /* Dereference the quo pointer argument. */
13847 arg_quo = build_fold_indirect_ref (arg_quo);
13848 /* Proceed iff a valid pointer type was passed in. */
13849 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13850 {
13851 /* Set the value. */
45a2c477
RG
13852 tree result_quo
13853 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13854 build_int_cst (TREE_TYPE (arg_quo),
13855 integer_quo));
ea91f957
KG
13856 TREE_SIDE_EFFECTS (result_quo) = 1;
13857 /* Combine the quo assignment with the rem. */
13858 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13859 result_quo, result_rem));
13860 }
13861 }
13862 }
13863 }
13864 return result;
13865}
752b7d38
KG
13866
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when folding is not possible.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (lgamma poles).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  /* Compute in the rounding mode matching the target format.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, MPFR_RNDN);
	  /* Clear MPFR's exception flags before the computation;
	     presumably do_mpfr_ckconv inspects them — confirm there.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  /* do_mpfr_ckconv yields a REAL_CST tree, or NULL when the
	     result cannot be used.  */
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
726a989a 13931
a41d064d
KG
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  Returns NULL_TREE when folding
   is not possible.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Both
     arguments must be complex constants with REAL_TYPE components and
     no overflow.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless the caller asked for non-finite folding, all four
	 components must be finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  /* Pick rounding modes matching the target format for both the
	     real (mpfr) and complex (mpc) libraries.  */
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear MPFR's exception flags before calling FUNC;
	     presumably do_mpc_ckconv consults them — confirm there.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
c128599a 13991
726a989a
RB
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   STMT is the builtin call; IGNORE is true when the call's value is
   not used.  Returns the folded replacement tree, or NULL_TREE when
   no folding was done.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Calls using __builtin_va_arg_pack () must stay as calls.  */
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      /* With no arguments there is no argument vector; point at
	 error_mark_node as a non-null placeholder that must not be
	 dereferenced.  */
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Set the location on the expression under a
		     wrapping NOP_EXPR, not on the NOP itself.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
d7f09764 14041
e79983f4 14042/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
14043 and set ASMSPEC as its user assembler name. DECL must be a
14044 function decl that declares a builtin. */
14045
14046void
14047set_builtin_user_assembler_name (tree decl, const char *asmspec)
14048{
3d78e008 14049 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
d7f09764
DN
14050 && asmspec != 0);
14051
ee516de9 14052 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 14053 set_user_assembler_name (builtin, asmspec);
ee516de9
EB
14054
14055 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
14056 && INT_TYPE_SIZE < BITS_PER_WORD)
d7f09764 14057 {
fffbab82 14058 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
ee516de9 14059 set_user_assembler_libfunc ("ffs", asmspec);
fffbab82 14060 set_optab_libfunc (ffs_optab, mode, "ffs");
d7f09764
DN
14061 }
14062}
bec922f0
SL
14063
14064/* Return true if DECL is a builtin that expands to a constant or similarly
14065 simple code. */
14066bool
14067is_simple_builtin (tree decl)
14068{
3d78e008 14069 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
bec922f0
SL
14070 switch (DECL_FUNCTION_CODE (decl))
14071 {
14072 /* Builtins that expand to constants. */
14073 case BUILT_IN_CONSTANT_P:
14074 case BUILT_IN_EXPECT:
14075 case BUILT_IN_OBJECT_SIZE:
14076 case BUILT_IN_UNREACHABLE:
14077 /* Simple register moves or loads from stack. */
45d439ac 14078 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
14079 case BUILT_IN_RETURN_ADDRESS:
14080 case BUILT_IN_EXTRACT_RETURN_ADDR:
14081 case BUILT_IN_FROB_RETURN_ADDR:
14082 case BUILT_IN_RETURN:
14083 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14084 case BUILT_IN_FRAME_ADDRESS:
14085 case BUILT_IN_VA_END:
14086 case BUILT_IN_STACK_SAVE:
14087 case BUILT_IN_STACK_RESTORE:
14088 /* Exception state returns or moves registers around. */
14089 case BUILT_IN_EH_FILTER:
14090 case BUILT_IN_EH_POINTER:
14091 case BUILT_IN_EH_COPY_VALUES:
14092 return true;
14093
14094 default:
14095 return false;
14096 }
14097
14098 return false;
14099}
14100
14101/* Return true if DECL is a builtin that is not expensive, i.e., they are
14102 most probably expanded inline into reasonably simple code. This is a
14103 superset of is_simple_builtin. */
14104bool
14105is_inexpensive_builtin (tree decl)
14106{
14107 if (!decl)
14108 return false;
14109 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14110 return true;
14111 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14112 switch (DECL_FUNCTION_CODE (decl))
14113 {
14114 case BUILT_IN_ABS:
9e878cf1 14115 CASE_BUILT_IN_ALLOCA:
ac868f29 14116 case BUILT_IN_BSWAP16:
bec922f0
SL
14117 case BUILT_IN_BSWAP32:
14118 case BUILT_IN_BSWAP64:
fe7ebef7 14119 case BUILT_IN_BSWAP128:
bec922f0
SL
14120 case BUILT_IN_CLZ:
14121 case BUILT_IN_CLZIMAX:
14122 case BUILT_IN_CLZL:
14123 case BUILT_IN_CLZLL:
14124 case BUILT_IN_CTZ:
14125 case BUILT_IN_CTZIMAX:
14126 case BUILT_IN_CTZL:
14127 case BUILT_IN_CTZLL:
14128 case BUILT_IN_FFS:
14129 case BUILT_IN_FFSIMAX:
14130 case BUILT_IN_FFSL:
14131 case BUILT_IN_FFSLL:
14132 case BUILT_IN_IMAXABS:
14133 case BUILT_IN_FINITE:
14134 case BUILT_IN_FINITEF:
14135 case BUILT_IN_FINITEL:
14136 case BUILT_IN_FINITED32:
14137 case BUILT_IN_FINITED64:
14138 case BUILT_IN_FINITED128:
14139 case BUILT_IN_FPCLASSIFY:
14140 case BUILT_IN_ISFINITE:
14141 case BUILT_IN_ISINF_SIGN:
14142 case BUILT_IN_ISINF:
14143 case BUILT_IN_ISINFF:
14144 case BUILT_IN_ISINFL:
14145 case BUILT_IN_ISINFD32:
14146 case BUILT_IN_ISINFD64:
14147 case BUILT_IN_ISINFD128:
14148 case BUILT_IN_ISNAN:
14149 case BUILT_IN_ISNANF:
14150 case BUILT_IN_ISNANL:
14151 case BUILT_IN_ISNAND32:
14152 case BUILT_IN_ISNAND64:
14153 case BUILT_IN_ISNAND128:
14154 case BUILT_IN_ISNORMAL:
14155 case BUILT_IN_ISGREATER:
14156 case BUILT_IN_ISGREATEREQUAL:
14157 case BUILT_IN_ISLESS:
14158 case BUILT_IN_ISLESSEQUAL:
14159 case BUILT_IN_ISLESSGREATER:
14160 case BUILT_IN_ISUNORDERED:
14161 case BUILT_IN_VA_ARG_PACK:
14162 case BUILT_IN_VA_ARG_PACK_LEN:
14163 case BUILT_IN_VA_COPY:
14164 case BUILT_IN_TRAP:
14165 case BUILT_IN_SAVEREGS:
14166 case BUILT_IN_POPCOUNTL:
14167 case BUILT_IN_POPCOUNTLL:
14168 case BUILT_IN_POPCOUNTIMAX:
14169 case BUILT_IN_POPCOUNT:
14170 case BUILT_IN_PARITYL:
14171 case BUILT_IN_PARITYLL:
14172 case BUILT_IN_PARITYIMAX:
14173 case BUILT_IN_PARITY:
14174 case BUILT_IN_LABS:
14175 case BUILT_IN_LLABS:
14176 case BUILT_IN_PREFETCH:
41dbbb37 14177 case BUILT_IN_ACC_ON_DEVICE:
bec922f0
SL
14178 return true;
14179
14180 default:
14181 return is_simple_builtin (decl);
14182 }
14183
14184 return false;
14185}
488c6247
ML
14186
14187/* Return true if T is a constant and the value cast to a target char
14188 can be represented by a host char.
14189 Store the casted char constant in *P if so. */
14190
14191bool
14192target_char_cst_p (tree t, char *p)
14193{
14194 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
14195 return false;
14196
14197 *p = (char)tree_to_uhwi (t);
14198 return true;
14199}
5747e0c0
XHL
14200
/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise returns false which doesn't guarantee it is not (thus the list of
   handled builtins below may be incomplete).

   Currently only the C math-library functions (and their float/long
   double/_FloatN variants via the CASE_FLT_FN* macros) are recognized.  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
    {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
    }
  return false;
}
de05c19d
MS
14287
14288/* Return true if OFFRNG is bounded to a subrange of offset values
14289 valid for the largest possible object. */
14290
14291bool
14292access_ref::offset_bounded () const
14293{
14294 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
14295 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
14296 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
14297}
4f8cfb42
JH
14298
/* Return a fnspec string describing the known side effects of builtin
   CALLEE, or the empty fnspec when they are not known.  The string
   encoding is the one accepted by attr_fnspec (see attr-fnspec.h).
   See tree-ssa-structalias.c:find_func_aliases
   for the list of builtins we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally reads memory pointed to by the first
	 argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRCAT_CHK:
	return "1cW 1 ";
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRNCAT_CHK:
	return "1cW 13";
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRCPY_CHK:
	return "1cO 1 ";
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPCPY_CHK:
	return ".cO 1 ";
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
	return "1cO313";
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMPCPY_CHK:
	return ".cO313";
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STPNCPY_CHK:
	return ".cO313";
      case BUILT_IN_BCOPY:
	return ".c23O3";
      case BUILT_IN_BZERO:
	return ".cO2";
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMCMP_EQ:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRNCASECMP:
	return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
	return ".cR ";

      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRRCHR:
	return ".cR ";
      case BUILT_IN_STRNLEN:
	return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
      case BUILT_IN_STRDUP:
	return "mCR ";
      case BUILT_IN_STRNDUP:
	return "mCR2";
      /* Allocating memory does not have any side-effects apart from
	 being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_GOMP_ALLOC:
	return "mC";
      CASE_BUILT_IN_ALLOCA:
	return "mc";
      /* These read memory pointed to by the first argument with size
	 in the third argument.  */
      case BUILT_IN_MEMCHR:
	return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
	return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_FREE:
      case BUILT_IN_GOMP_FREE:
	return ".co ";
      case BUILT_IN_VA_END:
	return ".cO ";
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
	return ".Cw ";
      /* The *gamma_r/lgamma_r functions additionally write the sign
	 through their trailing int pointer argument.  */
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	return ".C. Ot";
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	return ".c. Ot";
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	return ".c. . Ot";
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	return ".c. OtOt";
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	return "1cO3";
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	return ".cO ";
      case BUILT_IN_STACK_SAVE:
	return ".c";
      case BUILT_IN_ASSUME_ALIGNED:
	return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
	return ".cOt";

      default:
	return "";
    }
}