1 /* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83
84 struct target_builtins default_target_builtins;
85 #if SWITCHABLE_TARGET
86 struct target_builtins *this_target_builtins = &default_target_builtins;
87 #endif
88
89 /* Define the names of the builtin function types and codes. */
90 const char *const built_in_class_names[BUILT_IN_LAST]
91 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
92
93 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
94 const char * built_in_names[(int) END_BUILTINS] =
95 {
96 #include "builtins.def"
97 };
98
99 /* Set up an array of builtin_info_type; make sure each element decl is
100 initialized to NULL_TREE. */
101 builtin_info_type builtin_info[(int)END_BUILTINS];
102
103 /* Non-zero if __builtin_constant_p should be folded right away. */
104 bool force_folding_builtin_constant_p;
105
106 static int target_char_cast (tree, char *);
107 static rtx get_memory_rtx (tree, tree);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_cexpi (tree, rtx);
123 static rtx expand_builtin_int_roundingfn (tree, rtx);
124 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
125 static rtx expand_builtin_next_arg (void);
126 static rtx expand_builtin_va_start (tree);
127 static rtx expand_builtin_va_end (tree);
128 static rtx expand_builtin_va_copy (tree);
129 static rtx inline_expand_builtin_bytecmp (tree, rtx);
130 static rtx expand_builtin_strcmp (tree, rtx);
131 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
132 static rtx expand_builtin_memchr (tree, rtx);
133 static rtx expand_builtin_memcpy (tree, rtx);
134 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
135 rtx target, tree exp,
136 memop_ret retmode,
137 bool might_overlap);
138 static rtx expand_builtin_memmove (tree, rtx);
139 static rtx expand_builtin_mempcpy (tree, rtx);
140 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
141 static rtx expand_builtin_strcat (tree);
142 static rtx expand_builtin_strcpy (tree, rtx);
143 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
144 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
145 static rtx expand_builtin_stpncpy (tree, rtx);
146 static rtx expand_builtin_strncat (tree, rtx);
147 static rtx expand_builtin_strncpy (tree, rtx);
148 static rtx expand_builtin_memset (tree, rtx, machine_mode);
149 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
150 static rtx expand_builtin_bzero (tree);
151 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
152 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
153 static rtx expand_builtin_alloca (tree);
154 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
155 static rtx expand_builtin_frame_address (tree, tree);
156 static tree stabilize_va_list_loc (location_t, tree, int);
157 static rtx expand_builtin_expect (tree, rtx);
158 static rtx expand_builtin_expect_with_probability (tree, rtx);
159 static tree fold_builtin_constant_p (tree);
160 static tree fold_builtin_classify_type (tree);
161 static tree fold_builtin_strlen (location_t, tree, tree, tree);
162 static tree fold_builtin_inf (location_t, tree, int);
163 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
164 static bool validate_arg (const_tree, enum tree_code code);
165 static rtx expand_builtin_fabs (tree, rtx, rtx);
166 static rtx expand_builtin_signbit (tree, rtx);
167 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
168 static tree fold_builtin_isascii (location_t, tree);
169 static tree fold_builtin_toascii (location_t, tree);
170 static tree fold_builtin_isdigit (location_t, tree);
171 static tree fold_builtin_fabs (location_t, tree, tree);
172 static tree fold_builtin_abs (location_t, tree, tree);
173 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
174 enum tree_code);
175 static tree fold_builtin_varargs (location_t, tree, tree*, int);
176
177 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
178 static tree fold_builtin_strspn (location_t, tree, tree, tree);
179 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
180
181 static rtx expand_builtin_object_size (tree);
182 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
183 enum built_in_function);
184 static void maybe_emit_chk_warning (tree, enum built_in_function);
185 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
186 static tree fold_builtin_object_size (tree, tree);
187 static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
188 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
189 pointer_query *);
190
191 unsigned HOST_WIDE_INT target_newline;
192 unsigned HOST_WIDE_INT target_percent;
193 static unsigned HOST_WIDE_INT target_c;
194 static unsigned HOST_WIDE_INT target_s;
195 char target_percent_c[3];
196 char target_percent_s[3];
197 char target_percent_s_newline[4];
198 static tree do_mpfr_remquo (tree, tree, tree);
199 static tree do_mpfr_lgamma_r (tree, tree, tree);
200 static void expand_builtin_sync_synchronize (void);
201
202 access_ref::access_ref (tree bound /* = NULL_TREE */,
203 bool minaccess /* = false */)
204 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
205 base0 (true), parmarray ()
206 {
207 /* Set to valid. */
208 offrng[0] = offrng[1] = 0;
209 offmax[0] = offmax[1] = 0;
210 /* Invalidate. */
211 sizrng[0] = sizrng[1] = -1;
212
213 /* Set the default bounds of the access and adjust below. */
214 bndrng[0] = minaccess ? 1 : 0;
215 bndrng[1] = HOST_WIDE_INT_M1U;
216
217 /* When BOUND is nonnull and a range can be extracted from it,
218 set the bounds of the access to reflect both it and MINACCESS.
219 BNDRNG[0] is the size of the minimum access. */
220 tree rng[2];
221 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
222 {
223 bndrng[0] = wi::to_offset (rng[0]);
224 bndrng[1] = wi::to_offset (rng[1]);
225 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
226 }
227 }
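/* For example, given a BOUND argument with the value range [3, 5], the
   bounds of the access become bndrng = [1, 5] when MINACCESS is true
   (at least one byte must be accessed) and bndrng = [0, 5] otherwise.
   A BOUND whose lower bound is zero yields bndrng[0] == 0 either way.  */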
228
229 /* Return the PHI node REF refers to or null if it doesn't. */
230
231 gphi *
232 access_ref::phi () const
233 {
234 if (!ref || TREE_CODE (ref) != SSA_NAME)
235 return NULL;
236
237 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
238 if (gimple_code (def_stmt) != GIMPLE_PHI)
239 return NULL;
240
241 return as_a <gphi *> (def_stmt);
242 }
243
244 /* Determine and return the largest object to which *THIS refers. If *THIS
245 refers to a PHI and PREF is nonnull, fill *PREF with the details
246 of the object determined by compute_objsize(ARG, OSTYPE) for each
247 PHI argument ARG. */
248
249 tree
250 access_ref::get_ref (vec<access_ref> *all_refs,
251 access_ref *pref /* = NULL */,
252 int ostype /* = 1 */,
253 ssa_name_limit_t *psnlim /* = NULL */,
254 pointer_query *qry /* = NULL */) const
255 {
256 gphi *phi_stmt = this->phi ();
257 if (!phi_stmt)
258 return ref;
259
260 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
261 cause unbounded recursion. */
262 ssa_name_limit_t snlim_buf;
263 if (!psnlim)
264 psnlim = &snlim_buf;
265
266 if (!psnlim->visit_phi (ref))
267 return NULL_TREE;
268
269 /* Reflects the range of offsets of all PHI arguments that refer to the same
270 object (i.e., have the same REF). */
271 access_ref same_ref;
272 /* The conservative result of the PHI reflecting the offset and size
273 of the largest PHI argument, regardless of whether or not they all
274 refer to the same object. */
275 pointer_query empty_qry;
276 if (!qry)
277 qry = &empty_qry;
278
279 access_ref phi_ref;
280 if (pref)
281 {
282 phi_ref = *pref;
283 same_ref = *pref;
284 }
285
286 /* Set if any argument is a function array (or VLA) parameter not
287 declared [static]. */
288 bool parmarray = false;
289 /* The size of the smallest object referenced by the PHI arguments. */
290 offset_int minsize = 0;
291 const offset_int maxobjsize = wi::to_offset (max_object_size ());
292 /* The offset of the PHI, not reflecting those of its arguments. */
293 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
294
295 const unsigned nargs = gimple_phi_num_args (phi_stmt);
296 for (unsigned i = 0; i < nargs; ++i)
297 {
298 access_ref phi_arg_ref;
299 tree arg = gimple_phi_arg_def (phi_stmt, i);
300 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
301 || phi_arg_ref.sizrng[0] < 0)
302 /* A PHI with all null pointer arguments. */
303 return NULL_TREE;
304
305 /* Add PREF's offset to that of the argument. */
306 phi_arg_ref.add_offset (orng[0], orng[1]);
307 if (TREE_CODE (arg) == SSA_NAME)
308 qry->put_ref (arg, phi_arg_ref);
309
310 if (all_refs)
311 all_refs->safe_push (phi_arg_ref);
312
313 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
314 || phi_arg_ref.sizrng[1] != maxobjsize);
315
316 parmarray |= phi_arg_ref.parmarray;
317
318 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
319
320 if (phi_ref.sizrng[0] < 0)
321 {
322 if (!nullp)
323 same_ref = phi_arg_ref;
324 phi_ref = phi_arg_ref;
325 if (arg_known_size)
326 minsize = phi_arg_ref.sizrng[0];
327 continue;
328 }
329
330 const bool phi_known_size = (phi_ref.sizrng[0] != 0
331 || phi_ref.sizrng[1] != maxobjsize);
332
333 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
334 minsize = phi_arg_ref.sizrng[0];
335
336 /* Disregard null pointers in PHIs with two or more arguments.
337 TODO: Handle this better! */
338 if (nullp)
339 continue;
340
341 /* Determine the amount of remaining space in the argument. */
342 offset_int argrem[2];
343 argrem[1] = phi_arg_ref.size_remaining (argrem);
344
345 /* Determine the amount of remaining space computed so far and
346 if the remaining space in the argument is more, use it instead. */
347 offset_int phirem[2];
348 phirem[1] = phi_ref.size_remaining (phirem);
349
350 if (phi_arg_ref.ref != same_ref.ref)
351 same_ref.ref = NULL_TREE;
352
353 if (phirem[1] < argrem[1]
354 || (phirem[1] == argrem[1]
355 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
356 /* Use the argument with the most space remaining as the result,
357 or the larger one if the space is equal. */
358 phi_ref = phi_arg_ref;
359
360 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
361 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
362 same_ref.offrng[0] = phi_arg_ref.offrng[0];
363 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
364 same_ref.offrng[1] = phi_arg_ref.offrng[1];
365 }
366
367 if (!same_ref.ref && same_ref.offrng[0] != 0)
368 /* Clear BASE0 if not all the arguments refer to the same object and
369 if not all their offsets are zero-based. This allows the final
370 PHI offset to be out of bounds for some arguments but not for others
371 (or negative even if all the arguments are BASE0), which is overly
372 permissive. */
373 phi_ref.base0 = false;
374
375 if (same_ref.ref)
376 phi_ref = same_ref;
377 else
378 {
379 /* Replace the lower bound of the largest argument with the size
380 of the smallest argument, and set PARMARRAY if any argument
381 was one. */
382 phi_ref.sizrng[0] = minsize;
383 phi_ref.parmarray = parmarray;
384 }
385
386 if (phi_ref.sizrng[0] < 0)
387 {
388 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
389 (perhaps because they have all been already visited by prior
390 recursive calls). */
391 psnlim->leave_phi (ref);
392 return NULL_TREE;
393 }
394
395 /* Avoid changing *THIS. */
396 if (pref && pref != this)
397 *pref = phi_ref;
398
399 psnlim->leave_phi (ref);
400
401 return phi_ref.ref;
402 }
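/* As an illustration of get_ref above: for a pointer defined by
   p_1 = PHI <&a, &b> with char a[4] and char b[8] (the names are only
   for illustration), the arguments name different objects, so
   SAME_REF.REF ends up null and the result reflects the larger
   argument with its lower bound replaced by the size of the smaller
   one, i.e., sizrng = [4, 8].  */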
403
404 /* Return the maximum amount of space remaining and, if PMIN is non-null,
405 set *PMIN to the minimum. */
406
407 offset_int
408 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
409 {
410 offset_int minbuf;
411 if (!pmin)
412 pmin = &minbuf;
413
414 /* add_offset() ensures the offset range isn't inverted. */
415 gcc_checking_assert (offrng[0] <= offrng[1]);
416
417 if (base0)
418 {
419 /* The offset into the referenced object is zero-based (i.e., it's
420 not referenced by a pointer into the middle of some unknown object). */
421 if (offrng[0] < 0 && offrng[1] < 0)
422 {
423 /* If the offset is negative the remaining size is zero. */
424 *pmin = 0;
425 return 0;
426 }
427
428 if (sizrng[1] <= offrng[0])
429 {
430 /* If the starting offset is greater than or equal to the upper
431 bound on the size of the object, the space remaining is zero.
432 As a special case, if it's equal, set *PMIN to -1 to let
433 the caller know the offset is valid and just past the end. */
434 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
435 return 0;
436 }
437
438 /* Otherwise return the size minus the lower bound of the offset. */
439 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
440
441 *pmin = sizrng[0] - or0;
442 return sizrng[1] - or0;
443 }
444
445 /* The offset to the referenced object isn't zero-based (i.e., it may
446 refer to a byte other than the first). The size of such an object
447 is constrained only by the size of the address space (the result
448 of max_object_size()). */
449 if (sizrng[1] <= offrng[0])
450 {
451 *pmin = 0;
452 return 0;
453 }
454
455 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
456
457 *pmin = sizrng[0] - or0;
458 return sizrng[1] - or0;
459 }
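/* A worked example for size_remaining: with BASE0 set, sizrng = [8, 10]
   and offrng = [2, 6], the minimum remaining space is 8 - 2 = 6 and the
   maximum (the return value) is 10 - 2 = 8.  With sizrng = [8, 8] and
   offrng = [8, 9] the result is zero and *PMIN is set to -1 because the
   lower offset bound points just past the end of the object.  */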
460
461 /* Return true if the offset and object size are in range for SIZE. */
462
463 bool
464 access_ref::offset_in_range (const offset_int &size) const
465 {
466 if (size_remaining () < size)
467 return false;
468
469 if (base0)
470 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
471
472 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
473 return offmax[0] > -maxoff && offmax[1] < maxoff;
474 }
475
476 /* Add the range [MIN, MAX] to the offset range. For known objects (with
477 zero-based offsets) at least one of whose offset's bounds is in range,
478 constrain the other (or both) to the bounds of the object (i.e., zero
479 and the upper bound of its size). This improves the quality of
480 diagnostics. */
481
482 void access_ref::add_offset (const offset_int &min, const offset_int &max)
483 {
484 if (min <= max)
485 {
486 /* To add an ordinary range just add it to the bounds. */
487 offrng[0] += min;
488 offrng[1] += max;
489 }
490 else if (!base0)
491 {
492 /* To add an inverted range to an offset to an unknown object
493 expand it to the maximum. */
494 add_max_offset ();
495 return;
496 }
497 else
498 {
499 /* To add an inverted range to an offset to a known object, set
500 the upper bound to the maximum representable offset value
501 (which may be greater than MAX_OBJECT_SIZE).
502 The lower bound is either the sum of the current offset and
503 MIN when abs(MAX) is greater than the former, or zero otherwise.
504 Zero because then the inverted range includes the negative of
505 the lower bound. */
506 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
507 offrng[1] = maxoff;
508
509 if (max >= 0)
510 {
511 offrng[0] = 0;
512 if (offmax[0] > 0)
513 offmax[0] = 0;
514 return;
515 }
516
517 offset_int absmax = wi::abs (max);
518 if (offrng[0] < absmax)
519 {
520 offrng[0] += min;
521 /* Cap the lower bound at the upper (set to MAXOFF above)
522 to avoid inadvertently recreating an inverted range. */
523 if (offrng[1] < offrng[0])
524 offrng[0] = offrng[1];
525 }
526 else
527 offrng[0] = 0;
528 }
529
530 /* Set the minimum and maximum computed so far. */
531 if (offrng[1] < 0 && offrng[1] < offmax[0])
532 offmax[0] = offrng[1];
533 if (offrng[0] > 0 && offrng[0] > offmax[1])
534 offmax[1] = offrng[0];
535
536 if (!base0)
537 return;
538
539 /* When referencing a known object check to see if the offset computed
540 so far is in bounds... */
541 offset_int remrng[2];
542 remrng[1] = size_remaining (remrng);
543 if (remrng[1] > 0 || remrng[0] < 0)
544 {
545 /* ...if so, constrain it so that neither bound exceeds the size of
546 the object. Out of bounds offsets are left unchanged, and, for
547 better or worse, become in bounds later. They should be detected
548 and diagnosed at the point they first become invalid by
549 -Warray-bounds. */
550 if (offrng[0] < 0)
551 offrng[0] = 0;
552 if (offrng[1] > sizrng[1])
553 offrng[1] = sizrng[1];
554 }
555 }
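/* For example, for a known object with sizrng = [0, 8] and a zero
   starting offset, add_offset (-2, 12) first widens offrng to [-2, 12]
   and then, because some of the computed offset is still in bounds,
   clamps it back to [0, 8] so that later checks see an offset
   constrained to the object.  */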
556
557 /* Set a bit for the PHI in VISITED and return true if it wasn't
558 already set. */
559
560 bool
561 ssa_name_limit_t::visit_phi (tree ssa_name)
562 {
563 if (!visited)
564 visited = BITMAP_ALLOC (NULL);
565
566 /* Return false if SSA_NAME has already been visited. */
567 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
568 }
569
570 /* Clear a bit for the PHI in VISITED. */
571
572 void
573 ssa_name_limit_t::leave_phi (tree ssa_name)
574 {
575 /* Clear the bit for SSA_NAME in VISITED. */
576 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
577 }
578
579 /* Return false if the SSA_NAME chain length counter has reached
580 the limit, otherwise decrement the counter and return true. */
581
582 bool
583 ssa_name_limit_t::next ()
584 {
585 /* Return false to let the caller avoid recursing beyond
586 the specified limit. */
587 if (ssa_def_max == 0)
588 return false;
589
590 --ssa_def_max;
591 return true;
592 }
593
594 /* If the SSA_NAME has already been "seen" return a positive value.
595 Otherwise add it to VISITED. If the SSA_NAME limit has been
596 reached, return a negative value. Otherwise return zero. */
597
598 int
599 ssa_name_limit_t::next_phi (tree ssa_name)
600 {
601 {
602 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
603 /* Return a positive value if the PHI has already been visited. */
604 if (gimple_code (def_stmt) == GIMPLE_PHI
605 && !visit_phi (ssa_name))
606 return 1;
607 }
608
609 /* Return a negative value to let caller avoid recursing beyond
610 the specified limit. */
611 if (ssa_def_max == 0)
612 return -1;
613
614 --ssa_def_max;
615
616 return 0;
617 }
618
619 ssa_name_limit_t::~ssa_name_limit_t ()
620 {
621 if (visited)
622 BITMAP_FREE (visited);
623 }
624
625 /* Default ctor. Initialize object with pointers to the range_query
626 and cache_type instances to use or null. */
627
628 pointer_query::pointer_query (range_query *qry /* = NULL */,
629 cache_type *cache /* = NULL */)
630 : rvals (qry), var_cache (cache), hits (), misses (),
631 failures (), depth (), max_depth ()
632 {
633 /* No op. */
634 }
635
636 /* Return a pointer to the cached access_ref instance for the SSA_NAME
637 PTR if it's there or null otherwise. */
638
639 const access_ref *
640 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
641 {
642 if (!var_cache)
643 {
644 ++misses;
645 return NULL;
646 }
647
648 unsigned version = SSA_NAME_VERSION (ptr);
649 unsigned idx = version << 1 | (ostype & 1);
650 if (var_cache->indices.length () <= idx)
651 {
652 ++misses;
653 return NULL;
654 }
655
656 unsigned cache_idx = var_cache->indices[idx];
657 if (var_cache->access_refs.length () <= cache_idx)
658 {
659 ++misses;
660 return NULL;
661 }
662
663 access_ref &cache_ref = var_cache->access_refs[cache_idx];
664 if (cache_ref.ref)
665 {
666 ++hits;
667 return &cache_ref;
668 }
669
670 ++misses;
671 return NULL;
672 }
673
674 /* Retrieve the access_ref instance for a variable from the cache if it's
675 there, or compute it and insert it into the cache if the cache is nonnull. */
676
677 bool
678 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
679 {
680 const unsigned version
681 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
682
683 if (var_cache && version)
684 {
685 unsigned idx = version << 1 | (ostype & 1);
686 if (idx < var_cache->indices.length ())
687 {
688 unsigned cache_idx = var_cache->indices[idx] - 1;
689 if (cache_idx < var_cache->access_refs.length ()
690 && var_cache->access_refs[cache_idx].ref)
691 {
692 ++hits;
693 *pref = var_cache->access_refs[cache_idx];
694 return true;
695 }
696 }
697
698 ++misses;
699 }
700
701 if (!compute_objsize (ptr, ostype, pref, this))
702 {
703 ++failures;
704 return false;
705 }
706
707 return true;
708 }
709
710 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
711 nonnull. */
712
713 void
714 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
715 {
716 /* Only add populated/valid entries. */
717 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
718 return;
719
720 /* Add REF to the two-level cache. */
721 unsigned version = SSA_NAME_VERSION (ptr);
722 unsigned idx = version << 1 | (ostype & 1);
723
724 /* Grow INDICES if necessary. An index is valid if it's nonzero.
725 Its value minus one is the index into ACCESS_REFS. Not all
726 entries are valid. */
727 if (var_cache->indices.length () <= idx)
728 var_cache->indices.safe_grow_cleared (idx + 1);
729
730 if (!var_cache->indices[idx])
731 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
732
733 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
734 REF member is nonnull. All entries except for the last two
735 are valid. Once nonnull, the REF value must stay unchanged. */
736 unsigned cache_idx = var_cache->indices[idx];
737 if (var_cache->access_refs.length () <= cache_idx)
738 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
739
740 access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
741 if (cache_ref.ref)
742 {
743 gcc_checking_assert (cache_ref.ref == ref.ref);
744 return;
745 }
746
747 cache_ref = ref;
748 }
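/* A brief sketch of the two-level cache used by put_ref and get_ref:
   for an SSA_NAME with version 7 and OSTYPE 1 the first-level index is
   idx = (7 << 1) | 1 == 15.  INDICES[15] holds one plus the position of
   the corresponding entry in ACCESS_REFS, with zero meaning the slot
   hasn't been assigned yet.  */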
749
750 /* Flush the cache if it's nonnull. */
751
752 void
753 pointer_query::flush_cache ()
754 {
755 if (!var_cache)
756 return;
757 var_cache->indices.release ();
758 var_cache->access_refs.release ();
759 }
760
761 /* Return true if NAME starts with __builtin_ or __sync_. */
762
763 static bool
764 is_builtin_name (const char *name)
765 {
766 return (startswith (name, "__builtin_")
767 || startswith (name, "__sync_")
768 || startswith (name, "__atomic_"));
769 }
770
771 /* Return true if NODE should be considered for inline expansion regardless
772 of the optimization level. This means whenever a function is invoked with
773 its "internal" name, which normally contains the prefix "__builtin". */
774
775 bool
776 called_as_built_in (tree node)
777 {
778 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
779 we want the name used to call the function, not the name it
780 will have. */
781 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
782 return is_builtin_name (name);
783 }
784
785 /* Compute values M and N such that M divides (address of EXP - N) and such
786 that N < M. If these numbers can be determined, store M in alignp and N in
787 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
788 *alignp and any bit-offset to *bitposp.
789
790 Note that the address (and thus the alignment) computed here is based
791 on the address to which a symbol resolves, whereas DECL_ALIGN is based
792 on the address at which an object is actually located. These two
793 addresses are not always the same. For example, on ARM targets,
794 the address &foo of a Thumb function foo() has the lowest bit set,
795 whereas foo() itself starts on an even address.
796
797 If ADDR_P is true we are taking the address of the memory reference EXP
798 and thus cannot rely on the access taking place. */
799
800 static bool
801 get_object_alignment_2 (tree exp, unsigned int *alignp,
802 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
803 {
804 poly_int64 bitsize, bitpos;
805 tree offset;
806 machine_mode mode;
807 int unsignedp, reversep, volatilep;
808 unsigned int align = BITS_PER_UNIT;
809 bool known_alignment = false;
810
811 /* Get the innermost object and the constant (bitpos) and possibly
812 variable (offset) offset of the access. */
813 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
814 &unsignedp, &reversep, &volatilep);
815
816 /* Extract alignment information from the innermost object and
817 possibly adjust bitpos and offset. */
818 if (TREE_CODE (exp) == FUNCTION_DECL)
819 {
820 /* Function addresses can encode extra information besides their
821 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
822 allows the low bit to be used as a virtual bit, we know
823 that the address itself must be at least 2-byte aligned. */
824 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
825 align = 2 * BITS_PER_UNIT;
826 }
827 else if (TREE_CODE (exp) == LABEL_DECL)
828 ;
829 else if (TREE_CODE (exp) == CONST_DECL)
830 {
831 /* The alignment of a CONST_DECL is determined by its initializer. */
832 exp = DECL_INITIAL (exp);
833 align = TYPE_ALIGN (TREE_TYPE (exp));
834 if (CONSTANT_CLASS_P (exp))
835 align = targetm.constant_alignment (exp, align);
836
837 known_alignment = true;
838 }
839 else if (DECL_P (exp))
840 {
841 align = DECL_ALIGN (exp);
842 known_alignment = true;
843 }
844 else if (TREE_CODE (exp) == INDIRECT_REF
845 || TREE_CODE (exp) == MEM_REF
846 || TREE_CODE (exp) == TARGET_MEM_REF)
847 {
848 tree addr = TREE_OPERAND (exp, 0);
849 unsigned ptr_align;
850 unsigned HOST_WIDE_INT ptr_bitpos;
851 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
852
853 /* If the address is explicitly aligned, handle that. */
854 if (TREE_CODE (addr) == BIT_AND_EXPR
855 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
856 {
857 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
858 ptr_bitmask *= BITS_PER_UNIT;
859 align = least_bit_hwi (ptr_bitmask);
860 addr = TREE_OPERAND (addr, 0);
861 }
862
863 known_alignment
864 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
865 align = MAX (ptr_align, align);
866
867 /* Re-apply explicit alignment to the bitpos. */
868 ptr_bitpos &= ptr_bitmask;
869
870 /* The alignment of the pointer operand in a TARGET_MEM_REF
871 has to take the variable offset parts into account. */
872 if (TREE_CODE (exp) == TARGET_MEM_REF)
873 {
874 if (TMR_INDEX (exp))
875 {
876 unsigned HOST_WIDE_INT step = 1;
877 if (TMR_STEP (exp))
878 step = TREE_INT_CST_LOW (TMR_STEP (exp));
879 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
880 }
881 if (TMR_INDEX2 (exp))
882 align = BITS_PER_UNIT;
883 known_alignment = false;
884 }
885
886 /* When EXP is an actual memory reference then we can use
887 TYPE_ALIGN of a pointer indirection to derive alignment.
888 Do so only if get_pointer_alignment_1 did not reveal absolute
889 alignment knowledge and if using that alignment would
890 improve the situation. */
891 unsigned int talign;
892 if (!addr_p && !known_alignment
893 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
894 && talign > align)
895 align = talign;
896 else
897 {
898 /* Else adjust bitpos accordingly. */
899 bitpos += ptr_bitpos;
900 if (TREE_CODE (exp) == MEM_REF
901 || TREE_CODE (exp) == TARGET_MEM_REF)
902 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
903 }
904 }
905 else if (TREE_CODE (exp) == STRING_CST)
906 {
907 /* STRING_CST are the only constant objects we allow to be not
908 wrapped inside a CONST_DECL. */
909 align = TYPE_ALIGN (TREE_TYPE (exp));
910 if (CONSTANT_CLASS_P (exp))
911 align = targetm.constant_alignment (exp, align);
912
913 known_alignment = true;
914 }
915
916 /* If there is a non-constant offset part extract the maximum
917 alignment that can prevail. */
918 if (offset)
919 {
920 unsigned int trailing_zeros = tree_ctz (offset);
921 if (trailing_zeros < HOST_BITS_PER_INT)
922 {
923 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
924 if (inner)
925 align = MIN (align, inner);
926 }
927 }
928
929 /* Account for the alignment of runtime coefficients, so that the constant
930 bitpos is guaranteed to be accurate. */
931 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
932 if (alt_align != 0 && alt_align < align)
933 {
934 align = alt_align;
935 known_alignment = false;
936 }
937
938 *alignp = align;
939 *bitposp = bitpos.coeffs[0] & (align - 1);
940 return known_alignment;
941 }
942
943 /* For a memory reference expression EXP compute values M and N such that M
944 divides (&EXP - N) and such that N < M. If these numbers can be determined,
945 store M in alignp and N in *BITPOSP and return true. Otherwise return false
946 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
947
948 bool
949 get_object_alignment_1 (tree exp, unsigned int *alignp,
950 unsigned HOST_WIDE_INT *bitposp)
951 {
952 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
953 with it. */
954 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
955 exp = TREE_OPERAND (exp, 0);
956 return get_object_alignment_2 (exp, alignp, bitposp, false);
957 }
958
959 /* Return the alignment in bits of EXP, an object. */
960
961 unsigned int
962 get_object_alignment (tree exp)
963 {
964 unsigned HOST_WIDE_INT bitpos = 0;
965 unsigned int align;
966
967 get_object_alignment_1 (exp, &align, &bitpos);
968
969 /* align and bitpos now specify known low bits of the pointer.
970 ptr & (align - 1) == bitpos. */
971
972 if (bitpos != 0)
973 align = least_bit_hwi (bitpos);
974 return align;
975 }
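/* For instance, if get_object_alignment_1 determines align == 64 and
   bitpos == 16 (the object sits 16 bits past a 64-bit boundary), the
   usable alignment returned here is least_bit_hwi (16) == 16 bits,
   i.e., 2 bytes.  */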
976
977 /* For a pointer valued expression EXP compute values M and N such that M
978 divides (EXP - N) and such that N < M. If these numbers can be determined,
979 store M in alignp and N in *BITPOSP and return true. Return false if
980 the results are just a conservative approximation.
981
982 If EXP is not a pointer, false is returned too. */
983
984 bool
985 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
986 unsigned HOST_WIDE_INT *bitposp)
987 {
988 STRIP_NOPS (exp);
989
990 if (TREE_CODE (exp) == ADDR_EXPR)
991 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
992 alignp, bitposp, true);
993 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
994 {
995 unsigned int align;
996 unsigned HOST_WIDE_INT bitpos;
997 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
998 &align, &bitpos);
999 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
1000 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
1001 else
1002 {
1003 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
1004 if (trailing_zeros < HOST_BITS_PER_INT)
1005 {
1006 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
1007 if (inner)
1008 align = MIN (align, inner);
1009 }
1010 }
1011 *alignp = align;
1012 *bitposp = bitpos & (align - 1);
1013 return res;
1014 }
1015 else if (TREE_CODE (exp) == SSA_NAME
1016 && POINTER_TYPE_P (TREE_TYPE (exp)))
1017 {
1018 unsigned int ptr_align, ptr_misalign;
1019 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
1020
1021 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
1022 {
1023 *bitposp = ptr_misalign * BITS_PER_UNIT;
1024 *alignp = ptr_align * BITS_PER_UNIT;
1025 /* Make sure to return a sensible alignment when the multiplication
1026 by BITS_PER_UNIT overflowed. */
1027 if (*alignp == 0)
1028 *alignp = 1u << (HOST_BITS_PER_INT - 1);
1029 /* We cannot really tell whether this result is an approximation. */
1030 return false;
1031 }
1032 else
1033 {
1034 *bitposp = 0;
1035 *alignp = BITS_PER_UNIT;
1036 return false;
1037 }
1038 }
1039 else if (TREE_CODE (exp) == INTEGER_CST)
1040 {
1041 *alignp = BIGGEST_ALIGNMENT;
1042 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
1043 & (BIGGEST_ALIGNMENT - 1));
1044 return true;
1045 }
1046
1047 *bitposp = 0;
1048 *alignp = BITS_PER_UNIT;
1049 return false;
1050 }
1051
1052 /* Return the alignment in bits of EXP, a pointer valued expression.
1053 The alignment returned is, by default, the alignment of the thing that
1054 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
1055
1056 Otherwise, look at the expression to see if we can do better, i.e., if the
1057 expression is actually pointing at an object whose alignment is tighter. */
1058
1059 unsigned int
1060 get_pointer_alignment (tree exp)
1061 {
1062 unsigned HOST_WIDE_INT bitpos = 0;
1063 unsigned int align;
1064
1065 get_pointer_alignment_1 (exp, &align, &bitpos);
1066
1067 /* align and bitpos now specify known low bits of the pointer.
1068 ptr & (align - 1) == bitpos. */
1069
1070 if (bitpos != 0)
1071 align = least_bit_hwi (bitpos);
1072
1073 return align;
1074 }
1075
1076 /* Return the number of leading non-zero elements in the sequence
1077 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
1078 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
1079
1080 unsigned
1081 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
1082 {
1083 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1084
1085 unsigned n;
1086
1087 if (eltsize == 1)
1088 {
1089 /* Optimize the common case of plain char. */
1090 for (n = 0; n < maxelts; n++)
1091 {
1092 const char *elt = (const char*) ptr + n;
1093 if (!*elt)
1094 break;
1095 }
1096 }
1097 else
1098 {
1099 for (n = 0; n < maxelts; n++)
1100 {
1101 const char *elt = (const char*) ptr + n * eltsize;
1102 if (!memcmp (elt, "\0\0\0\0", eltsize))
1103 break;
1104 }
1105 }
1106 return n;
1107 }
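/* For example, string_length ("ab\0cd", 1, 5) returns 2; with
   ELTSIZE == 2 the count is in two-byte elements, stopping at the
   first element whose bytes are all zero.  */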
1108
1109 /* For a call EXPR at LOC to a function FNAME that expects a string
1110 in the argument ARG, issue a diagnostic due to it being called
1111 with an argument that is a character array with no terminating
1112 NUL. SIZE is the EXACT size of the array, and BNDRNG the number
1113 of characters in which the NUL is expected. Either EXPR or FNAME
1114 may be null but not both. SIZE may be null when BNDRNG is null. */
1115
1116 void
1117 warn_string_no_nul (location_t loc, tree expr, const char *fname,
1118 tree arg, tree decl, tree size /* = NULL_TREE */,
1119 bool exact /* = false */,
1120 const wide_int bndrng[2] /* = NULL */)
1121 {
1122 const opt_code opt = OPT_Wstringop_overread;
1123 if ((expr && warning_suppressed_p (expr, opt))
1124 || warning_suppressed_p (arg, opt))
1125 return;
1126
1127 loc = expansion_point_location_if_in_system_header (loc);
1128 bool warned;
1129
1130 /* Format the bound range as a string to keep the number of messages
1131 from exploding. */
1132 char bndstr[80];
1133 *bndstr = 0;
1134 if (bndrng)
1135 {
1136 if (bndrng[0] == bndrng[1])
1137 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
1138 else
1139 sprintf (bndstr, "[%llu, %llu]",
1140 (unsigned long long) bndrng[0].to_uhwi (),
1141 (unsigned long long) bndrng[1].to_uhwi ());
1142 }
1143
1144 const tree maxobjsize = max_object_size ();
1145 const wide_int maxsiz = wi::to_wide (maxobjsize);
1146 if (expr)
1147 {
1148 tree func = get_callee_fndecl (expr);
1149 if (bndrng)
1150 {
1151 if (wi::ltu_p (maxsiz, bndrng[0]))
1152 warned = warning_at (loc, opt,
1153 "%qD specified bound %s exceeds "
1154 "maximum object size %E",
1155 func, bndstr, maxobjsize);
1156 else
1157 {
1158 bool maybe = wi::to_wide (size) == bndrng[0];
1159 warned = warning_at (loc, opt,
1160 exact
1161 ? G_("%qD specified bound %s exceeds "
1162 "the size %E of unterminated array")
1163 : (maybe
1164 ? G_("%qD specified bound %s may "
1165 "exceed the size of at most %E "
1166 "of unterminated array")
1167 : G_("%qD specified bound %s exceeds "
1168 "the size of at most %E "
1169 "of unterminated array")),
1170 func, bndstr, size);
1171 }
1172 }
1173 else
1174 warned = warning_at (loc, opt,
1175 "%qD argument missing terminating nul",
1176 func);
1177 }
1178 else
1179 {
1180 if (bndrng)
1181 {
1182 if (wi::ltu_p (maxsiz, bndrng[0]))
1183 warned = warning_at (loc, opt,
1184 "%qs specified bound %s exceeds "
1185 "maximum object size %E",
1186 fname, bndstr, maxobjsize);
1187 else
1188 {
1189 bool maybe = wi::to_wide (size) == bndrng[0];
1190 warned = warning_at (loc, opt,
1191 exact
1192 ? G_("%qs specified bound %s exceeds "
1193 "the size %E of unterminated array")
1194 : (maybe
1195 ? G_("%qs specified bound %s may "
1196 "exceed the size of at most %E "
1197 "of unterminated array")
1198 : G_("%qs specified bound %s exceeds "
1199 "the size of at most %E "
1200 "of unterminated array")),
1201 fname, bndstr, size);
1202 }
1203 }
1204 else
1205 warned = warning_at (loc, opt,
1206 "%qs argument missing terminating nul",
1207 fname);
1208 }
1209
1210 if (warned)
1211 {
1212 inform (DECL_SOURCE_LOCATION (decl),
1213 "referenced argument declared here");
1214 suppress_warning (arg, opt);
1215 if (expr)
1216 suppress_warning (expr, opt);
1217 }
1218 }
1219
1220 /* For a call EXPR (which may be null) that expects a string argument
1221 SRC, returns false if SRC is a character array with no terminating
1222 NUL. When nonnull, BOUND is the number of characters in which to
1223 expect the terminating NUL. RDONLY is true for read-only accesses
1224 such as strcmp, false for read-write such as strcpy. When EXPR is
1225 nonnull, also issues a warning. */
1226
1227 bool
1228 check_nul_terminated_array (tree expr, tree src,
1229 tree bound /* = NULL_TREE */)
1230 {
1231 /* The constant size of the array SRC points to. The actual size
1232 may be less if EXACT is true, but not more. */
1233 tree size;
1234 /* True if SRC involves a non-constant offset into the array. */
1235 bool exact;
1236 /* The unterminated constant array SRC points to. */
1237 tree nonstr = unterminated_array (src, &size, &exact);
1238 if (!nonstr)
1239 return true;
1240
1241 /* NONSTR refers to the non-nul terminated constant array and SIZE
1242 is the constant size of the array in bytes. EXACT is true when
1243 SIZE is exact. */
1244
1245 wide_int bndrng[2];
1246 if (bound)
1247 {
1248 value_range r;
1249
1250 get_global_range_query ()->range_of_expr (r, bound);
1251
1252 if (r.kind () != VR_RANGE)
1253 return true;
1254
1255 bndrng[0] = r.lower_bound ();
1256 bndrng[1] = r.upper_bound ();
1257
1258 if (exact)
1259 {
1260 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
1261 return true;
1262 }
1263 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
1264 return true;
1265 }
1266
1267 if (expr)
1268 warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
1269 size, exact, bound ? bndrng : NULL);
1270
1271 return false;
1272 }
1273
1274 /* If EXP refers to an unterminated constant character array return
1275 the declaration of the object of which the array is a member or
1276 element and if SIZE is not null, set *SIZE to the size of
1277 the unterminated array and set *EXACT if the size is exact or
1278 clear it otherwise. Otherwise return null. */
1279
1280 tree
1281 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
1282 {
1283 /* C_STRLEN will return NULL and set DECL in the info
1284 structure if EXP references an unterminated array. */
1285 c_strlen_data lendata = { };
1286 tree len = c_strlen (exp, 1, &lendata);
1287 if (len == NULL_TREE && lendata.minlen && lendata.decl)
1288 {
1289 if (size)
1290 {
1291 len = lendata.minlen;
1292 if (lendata.off)
1293 {
1294 /* Constant offsets are already accounted for in LENDATA.MINLEN,
1295 but not in a SSA_NAME + CST expression. */
1296 if (TREE_CODE (lendata.off) == INTEGER_CST)
1297 *exact = true;
1298 else if (TREE_CODE (lendata.off) == PLUS_EXPR
1299 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
1300 {
1301 /* Subtract the offset from the size of the array. */
1302 *exact = false;
1303 tree temp = TREE_OPERAND (lendata.off, 1);
1304 temp = fold_convert (ssizetype, temp);
1305 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
1306 }
1307 else
1308 *exact = false;
1309 }
1310 else
1311 *exact = true;
1312
1313 *size = len;
1314 }
1315 return lendata.decl;
1316 }
1317
1318 return NULL_TREE;
1319 }
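/* As an example, for an argument that points to the (hypothetical)
   declaration
     static const char a[4] = "abcd";
   which leaves no room for a terminating NUL, unterminated_array
   returns the declaration of A with *SIZE set to 4 and *EXACT set to
   true.  For a properly terminated string it returns null.  */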
1320
1321 /* Compute the length of a null-terminated character string or wide
1322 character string handling character sizes of 1, 2, and 4 bytes.
1323 TREE_STRING_LENGTH is not the right way because it evaluates to
1324 the size of the character array in bytes (as opposed to characters)
1325 and because it can contain a zero byte in the middle.
1326
1327 ONLY_VALUE should be nonzero if the result is not going to be emitted
1328 into the instruction stream and zero if it is going to be expanded.
1329 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
1330 is returned, otherwise NULL, since
1331 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
1332 evaluate the side-effects.
1333
1334 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
1335 accesses. Note that this implies the result is not going to be emitted
1336 into the instruction stream.
1337
1338 Additional information about the string accessed may be recorded
1339 in DATA. For example, if ARG references an unterminated string,
1340 then the declaration will be stored in the DECL field. If the
1341 length of the unterminated string can be determined, it'll be
1342 stored in the LEN field. Note this length could well be different
1343 than what a C strlen call would return.
1344
1345 ELTSIZE is 1 for normal single byte character strings, and 2 or
1346 4 for wide character strings. ELTSIZE is by default 1.
1347
1348 The value returned is of type `ssizetype'. */
1349
1350 tree
1351 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
1352 {
1353 /* If we were not passed a DATA pointer, then get one to a local
1354 structure. That avoids having to check DATA for NULL before
1355 each time we want to use it. */
1356 c_strlen_data local_strlen_data = { };
1357 if (!data)
1358 data = &local_strlen_data;
1359
1360 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
1361
1362 tree src = STRIP_NOPS (arg);
1363 if (TREE_CODE (src) == COND_EXPR
1364 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1365 {
1366 tree len1, len2;
1367
1368 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1369 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
1370 if (tree_int_cst_equal (len1, len2))
1371 return len1;
1372 }
1373
1374 if (TREE_CODE (src) == COMPOUND_EXPR
1375 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
1376 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
1377
1378 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
1379
1380 /* Offset from the beginning of the string in bytes. */
1381 tree byteoff;
1382 tree memsize;
1383 tree decl;
1384 src = string_constant (src, &byteoff, &memsize, &decl);
1385 if (src == 0)
1386 return NULL_TREE;
1387
1388 /* Determine the size of the string element. */
1389 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
1390 return NULL_TREE;
1391
1392 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
1393 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
1394 in case the latter is less than the size of the array, such as when
1395 SRC refers to a short string literal used to initialize a large array.
1396 In that case, the elements of the array after the terminating NUL are
1397 all NUL. */
1398 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
1399 strelts = strelts / eltsize;
1400
1401 if (!tree_fits_uhwi_p (memsize))
1402 return NULL_TREE;
1403
1404 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1405
1406 /* PTR can point to the byte representation of any string type, including
1407 char* and wchar_t*. */
1408 const char *ptr = TREE_STRING_POINTER (src);
1409
1410 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
1411 {
1412 /* The code below works only for single byte character types. */
1413 if (eltsize != 1)
1414 return NULL_TREE;
1415
1416 /* If the string has an internal NUL character followed by any
1417 non-NUL characters (e.g., "foo\0bar"), we can't compute
1418 the offset to the following NUL if we don't know where to
1419 start searching for it. */
1420 unsigned len = string_length (ptr, eltsize, strelts);
1421
1422 /* Return when an embedded null character is found or none at all.
1423 In the latter case, set the DECL/LEN field in the DATA structure
1424 so that callers may examine them. */
1425 if (len + 1 < strelts)
1426 return NULL_TREE;
1427 else if (len >= maxelts)
1428 {
1429 data->decl = decl;
1430 data->off = byteoff;
1431 data->minlen = ssize_int (len);
1432 return NULL_TREE;
1433 }
1434
1435 /* For empty strings the result should be zero. */
1436 if (len == 0)
1437 return ssize_int (0);
1438
1439 /* We don't know the starting offset, but we do know that the string
1440 has no internal zero bytes. If the offset falls within the bounds
1441 of the string subtract the offset from the length of the string,
1442 and return that. Otherwise the length is zero. Take care to
1443 use SAVE_EXPR in case the OFFSET has side-effects. */
1444 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
1445 : byteoff;
1446 offsave = fold_convert_loc (loc, sizetype, offsave);
1447 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
1448 size_int (len));
1449 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
1450 offsave);
1451 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
1452 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
1453 build_zero_cst (ssizetype));
1454 }
1455
1456 /* Offset from the beginning of the string in elements. */
1457 HOST_WIDE_INT eltoff;
1458
1459 /* We have a known offset into the string. Start searching there for
1460 a null character if we can represent it as a single HOST_WIDE_INT. */
1461 if (byteoff == 0)
1462 eltoff = 0;
1463 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1464 eltoff = -1;
1465 else
1466 eltoff = tree_to_uhwi (byteoff) / eltsize;
1467
1468 /* If the offset is known to be out of bounds, warn, and call strlen at
1469 runtime. */
1470 if (eltoff < 0 || eltoff >= maxelts)
1471 {
1472 /* Suppress multiple warnings for propagated constant strings. */
1473 if (only_value != 2
1474 && !warning_suppressed_p (arg, OPT_Warray_bounds)
1475 && warning_at (loc, OPT_Warray_bounds,
1476 "offset %qwi outside bounds of constant string",
1477 eltoff))
1478 {
1479 if (decl)
1480 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
1481 suppress_warning (arg, OPT_Warray_bounds);
1482 }
1483 return NULL_TREE;
1484 }
1485
1486 /* If eltoff is larger than strelts but less than maxelts the
1487 string length is zero, since the excess memory will be zero. */
1488 if (eltoff > strelts)
1489 return ssize_int (0);
1490
1491 /* Use strlen to search for the first zero byte. Since any strings
1492 constructed with build_string will have nulls appended, we win even
1493 if we get handed something like (char[4])"abcd".
1494
1495 Since ELTOFF is our starting index into the string, no further
1496 calculation is needed. */
1497 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
1498 strelts - eltoff);
1499
1500 /* Don't know what to return if there was no zero termination.
1501 Ideally this would turn into a gcc_checking_assert over time.
1502 Set DECL/LEN so callers can examine them. */
1503 if (len >= maxelts - eltoff)
1504 {
1505 data->decl = decl;
1506 data->off = byteoff;
1507 data->minlen = ssize_int (len);
1508 return NULL_TREE;
1509 }
1510
1511 return ssize_int (len);
1512 }
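/* Two simple cases of c_strlen by way of illustration: for
   "hello" + 2 the result is ssize_int (3), while for a constant offset
   known to be past the end of the string the function warns with
   -Warray-bounds (unless ONLY_VALUE is 2) and returns NULL_TREE,
   leaving the computation to the runtime strlen.  */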
1513
1514 /* Return a constant integer corresponding to target reading
1515 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1516 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1517 are assumed to be zero, otherwise it reads as many characters
1518 as needed. */
1519
1520 rtx
1521 c_readstr (const char *str, scalar_int_mode mode,
1522 bool null_terminated_p/*=true*/)
1523 {
1524 HOST_WIDE_INT ch;
1525 unsigned int i, j;
1526 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
1527
1528 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
1529 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
1530 / HOST_BITS_PER_WIDE_INT;
1531
1532 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
1533 for (i = 0; i < len; i++)
1534 tmp[i] = 0;
1535
1536 ch = 1;
1537 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1538 {
1539 j = i;
1540 if (WORDS_BIG_ENDIAN)
1541 j = GET_MODE_SIZE (mode) - i - 1;
1542 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
1543 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
1544 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
1545 j *= BITS_PER_UNIT;
1546
1547 if (ch || !null_terminated_p)
1548 ch = (unsigned char) str[i];
1549 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
1550 }
1551
1552 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1553 return immed_wide_int_const (c, mode);
1554 }
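/* For example, on a little-endian target with 32-bit SImode,
   c_readstr ("ab", SImode) yields the constant 0x6261: 'a' (0x61) in
   the least significant byte, 'b' (0x62) in the next, and the
   remaining bytes zero because NULL_TERMINATED_P defaults to true and
   the string's terminating NUL makes all further bytes zero.  */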
1555
1556 /* Cast a target constant CST to target CHAR and if that value fits into
1557 host char type, return zero and put that value into variable pointed to by
1558 P. */
1559
1560 static int
1561 target_char_cast (tree cst, char *p)
1562 {
1563 unsigned HOST_WIDE_INT val, hostval;
1564
1565 if (TREE_CODE (cst) != INTEGER_CST
1566 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
1567 return 1;
1568
1569 /* Do not care if it fits or not right here. */
1570 val = TREE_INT_CST_LOW (cst);
1571
1572 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
1573 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
1574
1575 hostval = val;
1576 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
1577 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
1578
1579 if (val != hostval)
1580 return 1;
1581
1582 *p = hostval;
1583 return 0;
1584 }
1585
1586 /* Similar to save_expr, but assumes that arbitrary code is not executed
1587 in between the multiple evaluations. In particular, we assume that a
1588 non-addressable local variable will not be modified. */
1589
1590 static tree
1591 builtin_save_expr (tree exp)
1592 {
1593 if (TREE_CODE (exp) == SSA_NAME
1594 || (TREE_ADDRESSABLE (exp) == 0
1595 && (TREE_CODE (exp) == PARM_DECL
1596 || (VAR_P (exp) && !TREE_STATIC (exp)))))
1597 return exp;
1598
1599 return save_expr (exp);
1600 }
1601
1602 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1603 times to get the address of either a higher stack frame, or a return
1604 address located within it (depending on FNDECL_CODE). */
1605
1606 static rtx
1607 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1608 {
1609 int i;
1610 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
1611 if (tem == NULL_RTX)
1612 {
1613 /* For a zero count with __builtin_return_address, we don't care what
1614 frame address we return, because target-specific definitions will
1615 override us. Therefore frame pointer elimination is OK, and using
1616 the soft frame pointer is OK.
1617
1618 For a nonzero count, or a zero count with __builtin_frame_address,
1619 we require a stable offset from the current frame pointer to the
1620 previous one, so we must use the hard frame pointer, and
1621 we must disable frame pointer elimination. */
1622 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1623 tem = frame_pointer_rtx;
1624 else
1625 {
1626 tem = hard_frame_pointer_rtx;
1627
1628 /* Tell reload not to eliminate the frame pointer. */
1629 crtl->accesses_prior_frames = 1;
1630 }
1631 }
1632
1633 if (count > 0)
1634 SETUP_FRAME_ADDRESSES ();
1635
1636 /* On the SPARC, the return address is not in the frame, it is in a
1637 register. There is no way to access it off of the current frame
1638 pointer, but it can be accessed off the previous frame pointer by
1639 reading the value from the register window save area. */
1640 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1641 count--;
1642
1643 /* Scan back COUNT frames to the specified frame. */
1644 for (i = 0; i < count; i++)
1645 {
1646 /* Assume the dynamic chain pointer is in the word that the
1647 frame address points to, unless otherwise specified. */
1648 tem = DYNAMIC_CHAIN_ADDRESS (tem);
1649 tem = memory_address (Pmode, tem);
1650 tem = gen_frame_mem (Pmode, tem);
1651 tem = copy_to_reg (tem);
1652 }
1653
1654 /* For __builtin_frame_address, return what we've got. But, on
1655 the SPARC for example, we may have to add a bias. */
1656 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1657 return FRAME_ADDR_RTX (tem);
1658
1659 /* For __builtin_return_address, get the return address from that frame. */
1660 #ifdef RETURN_ADDR_RTX
1661 tem = RETURN_ADDR_RTX (count, tem);
1662 #else
1663 tem = memory_address (Pmode,
1664 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1665 tem = gen_frame_mem (Pmode, tem);
1666 #endif
1667 return tem;
1668 }
1669
1670 /* Alias set used for setjmp buffer. */
1671 static alias_set_type setjmp_alias_set = -1;
1672
1673 /* Construct the leading half of a __builtin_setjmp call. Control will
1674 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1675 exception handling code. */
1676
1677 void
1678 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1679 {
1680 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1681 rtx stack_save;
1682 rtx mem;
1683
1684 if (setjmp_alias_set == -1)
1685 setjmp_alias_set = new_alias_set ();
1686
1687 buf_addr = convert_memory_address (Pmode, buf_addr);
1688
1689 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1690
1691 /* We store the frame pointer and the address of receiver_label in
1692 the buffer and use the rest of it for the stack save area, which
1693 is machine-dependent. */
1694
1695 mem = gen_rtx_MEM (Pmode, buf_addr);
1696 set_mem_alias_set (mem, setjmp_alias_set);
1697 emit_move_insn (mem, hard_frame_pointer_rtx);
1698
1699 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1700 GET_MODE_SIZE (Pmode))),
1701 set_mem_alias_set (mem, setjmp_alias_set);
1702
1703 emit_move_insn (validize_mem (mem),
1704 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1705
1706 stack_save = gen_rtx_MEM (sa_mode,
1707 plus_constant (Pmode, buf_addr,
1708 2 * GET_MODE_SIZE (Pmode)));
1709 set_mem_alias_set (stack_save, setjmp_alias_set);
1710 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1711
1712 /* If there is further processing to do, do it. */
1713 if (targetm.have_builtin_setjmp_setup ())
1714 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1715
1716 /* We have a nonlocal label. */
1717 cfun->has_nonlocal_label = 1;
1718 }
1719
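/* A minimal sketch of the buffer layout produced above (assuming Pmode is
   one word wide; the exact split is machine-dependent):

     buf[0]   hard frame pointer of the frame doing the setjmp
     buf[1]   address of RECEIVER_LABEL
     buf[2..] stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and rewrite the slots at the same offsets.  */
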
1720 /* Construct the trailing part of a __builtin_setjmp call. This is
1721 also called directly by the SJLJ exception handling code.
1722 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1723
1724 void
1725 expand_builtin_setjmp_receiver (rtx receiver_label)
1726 {
1727 rtx chain;
1728
1729 /* Mark the FP as used when we get here, so we have to make sure it's
1730 marked as used by this function. */
1731 emit_use (hard_frame_pointer_rtx);
1732
1733 /* Mark the static chain as clobbered here so life information
1734 doesn't get messed up for it. */
1735 chain = rtx_for_static_chain (current_function_decl, true);
1736 if (chain && REG_P (chain))
1737 emit_clobber (chain);
1738
1739 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1740 {
1741 /* If the argument pointer can be eliminated in favor of the
1742 frame pointer, we don't need to restore it. We assume here
1743 that if such an elimination is present, it can always be used.
1744 This is the case on all known machines; if we don't make this
1745 assumption, we do unnecessary saving on many machines. */
1746 size_t i;
1747 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1748
1749 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1750 if (elim_regs[i].from == ARG_POINTER_REGNUM
1751 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1752 break;
1753
1754 if (i == ARRAY_SIZE (elim_regs))
1755 {
1756 /* Now restore our arg pointer from the address at which it
1757 was saved in our stack frame. */
1758 emit_move_insn (crtl->args.internal_arg_pointer,
1759 copy_to_reg (get_arg_pointer_save_area ()));
1760 }
1761 }
1762
1763 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1764 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1765 else if (targetm.have_nonlocal_goto_receiver ())
1766 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1767 else
1768 { /* Nothing */ }
1769
1770 /* We must not allow the code we just generated to be reordered by
1771 scheduling. Specifically, the update of the frame pointer must
1772 happen immediately, not later. */
1773 emit_insn (gen_blockage ());
1774 }
1775
1776 /* __builtin_longjmp is passed a pointer to an array of five words (not
1777 all will be used on all machines). It operates similarly to the C
1778 library function of the same name, but is more efficient. Much of
1779 the code below is copied from the handling of non-local gotos. */
1780
1781 static void
1782 expand_builtin_longjmp (rtx buf_addr, rtx value)
1783 {
1784 rtx fp, lab, stack;
1785 rtx_insn *insn, *last;
1786 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1787
1788 /* DRAP is needed for stack realignment if longjmp is expanded in the
1789 current function. */
1790 if (SUPPORTS_STACK_ALIGNMENT)
1791 crtl->need_drap = true;
1792
1793 if (setjmp_alias_set == -1)
1794 setjmp_alias_set = new_alias_set ();
1795
1796 buf_addr = convert_memory_address (Pmode, buf_addr);
1797
1798 buf_addr = force_reg (Pmode, buf_addr);
1799
1800 /* We require the user to pass a second argument of 1, because
1801 that is what builtin_setjmp will return. */
1802 gcc_assert (value == const1_rtx);
1803
1804 last = get_last_insn ();
1805 if (targetm.have_builtin_longjmp ())
1806 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1807 else
1808 {
1809 fp = gen_rtx_MEM (Pmode, buf_addr);
1810 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1811 GET_MODE_SIZE (Pmode)));
1812
1813 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1814 2 * GET_MODE_SIZE (Pmode)));
1815 set_mem_alias_set (fp, setjmp_alias_set);
1816 set_mem_alias_set (lab, setjmp_alias_set);
1817 set_mem_alias_set (stack, setjmp_alias_set);
1818
1819 /* Pick up FP, label, and SP from the block and jump. This code is
1820 from expand_goto in stmt.c; see there for detailed comments. */
1821 if (targetm.have_nonlocal_goto ())
1822 /* We have to pass a value to the nonlocal_goto pattern that will
1823 get copied into the static_chain pointer, but it does not matter
1824 what that value is, because builtin_setjmp does not use it. */
1825 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1826 else
1827 {
1828 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1829 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1830
1831 lab = copy_to_reg (lab);
1832
1833 /* Restore the frame pointer and stack pointer. We must use a
1834 temporary since the setjmp buffer may be a local. */
1835 fp = copy_to_reg (fp);
1836 emit_stack_restore (SAVE_NONLOCAL, stack);
1837
1838 /* Ensure the frame pointer move is not optimized. */
1839 emit_insn (gen_blockage ());
1840 emit_clobber (hard_frame_pointer_rtx);
1841 emit_clobber (frame_pointer_rtx);
1842 emit_move_insn (hard_frame_pointer_rtx, fp);
1843
1844 emit_use (hard_frame_pointer_rtx);
1845 emit_use (stack_pointer_rtx);
1846 emit_indirect_jump (lab);
1847 }
1848 }
1849
1850 /* Search backwards and mark the jump insn as a non-local goto.
1851 Note that this precludes the use of __builtin_longjmp to a
1852 __builtin_setjmp target in the same function. However, we've
1853 already cautioned the user that these functions are for
1854 internal exception handling use only. */
1855 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1856 {
1857 gcc_assert (insn != last);
1858
1859 if (JUMP_P (insn))
1860 {
1861 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1862 break;
1863 }
1864 else if (CALL_P (insn))
1865 break;
1866 }
1867 }
1868
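/* A hedged usage sketch for the two expanders above (illustrative only;
   do_work and handle_unwind are hypothetical helpers).  These builtins are
   intended for internal exception-handling use, and the jump must come
   from a callee, never from the function containing the setjmp:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();          normal path
     else
       handle_unwind ();    reached when a callee does __builtin_longjmp (buf, 1)

   The second argument of __builtin_longjmp must be the constant 1, as the
   assertion above enforces.  */
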
1869 static inline bool
1870 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1871 {
1872 return (iter->i < iter->n);
1873 }
1874
1875 /* This function validates the types of a function call argument list
1876 against a specified list of tree_codes. If the last specifier is a 0,
1877 that represents an ellipsis, otherwise the last specifier must be a
1878 VOID_TYPE. */
1879
1880 static bool
1881 validate_arglist (const_tree callexpr, ...)
1882 {
1883 enum tree_code code;
1884 bool res = false;
1885 va_list ap;
1886 const_call_expr_arg_iterator iter;
1887 const_tree arg;
1888
1889 va_start (ap, callexpr);
1890 init_const_call_expr_arg_iterator (callexpr, &iter);
1891
1892 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1893 tree fn = CALL_EXPR_FN (callexpr);
1894 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1895
1896 for (unsigned argno = 1; ; ++argno)
1897 {
1898 code = (enum tree_code) va_arg (ap, int);
1899
1900 switch (code)
1901 {
1902 case 0:
1903 /* This signifies an ellipsis; any further arguments are all ok. */
1904 res = true;
1905 goto end;
1906 case VOID_TYPE:
1907 /* This signifies an endlink, if no arguments remain, return
1908 true, otherwise return false. */
1909 res = !more_const_call_expr_args_p (&iter);
1910 goto end;
1911 case POINTER_TYPE:
1912 /* The actual argument must be nonnull when either the whole
1913 called function has been declared nonnull, or when the formal
1914 argument corresponding to the actual argument has been. */
1915 if (argmap
1916 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1917 {
1918 arg = next_const_call_expr_arg (&iter);
1919 if (!validate_arg (arg, code) || integer_zerop (arg))
1920 goto end;
1921 break;
1922 }
1923 /* FALLTHRU */
1924 default:
1925 /* If no parameters remain or the parameter's code does not
1926 match the specified code, return false. Otherwise continue
1927 checking any remaining arguments. */
1928 arg = next_const_call_expr_arg (&iter);
1929 if (!validate_arg (arg, code))
1930 goto end;
1931 break;
1932 }
1933 }
1934
1935 /* We need gotos here since every exit path must reach the single
1936 va_end call below. */
1937 end: ;
1938 va_end (ap);
1939
1940 BITMAP_FREE (argmap);
1941
1942 return res;
1943 }
1944
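/* For reference, a typical call from the expanders below looks like

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   where the trailing VOID_TYPE terminates the specifier list and a
   trailing 0 instead means that any further arguments are acceptable.  */
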
1945 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1946 and the address of the save area. */
1947
1948 static rtx
1949 expand_builtin_nonlocal_goto (tree exp)
1950 {
1951 tree t_label, t_save_area;
1952 rtx r_label, r_save_area, r_fp, r_sp;
1953 rtx_insn *insn;
1954
1955 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1956 return NULL_RTX;
1957
1958 t_label = CALL_EXPR_ARG (exp, 0);
1959 t_save_area = CALL_EXPR_ARG (exp, 1);
1960
1961 r_label = expand_normal (t_label);
1962 r_label = convert_memory_address (Pmode, r_label);
1963 r_save_area = expand_normal (t_save_area);
1964 r_save_area = convert_memory_address (Pmode, r_save_area);
1965 /* Copy the address of the save location to a register just in case it was
1966 based on the frame pointer. */
1967 r_save_area = copy_to_reg (r_save_area);
1968 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1969 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1970 plus_constant (Pmode, r_save_area,
1971 GET_MODE_SIZE (Pmode)));
1972
1973 crtl->has_nonlocal_goto = 1;
1974
1975 /* ??? We no longer need to pass the static chain value, afaik. */
1976 if (targetm.have_nonlocal_goto ())
1977 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1978 else
1979 {
1980 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1981 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1982
1983 r_label = copy_to_reg (r_label);
1984
1985 /* Restore the frame pointer and stack pointer. We must use a
1986 temporary since the setjmp buffer may be a local. */
1987 r_fp = copy_to_reg (r_fp);
1988 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1989
1990 /* Ensure the frame pointer move is not optimized. */
1991 emit_insn (gen_blockage ());
1992 emit_clobber (hard_frame_pointer_rtx);
1993 emit_clobber (frame_pointer_rtx);
1994 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1995
1996 /* USE of hard_frame_pointer_rtx added for consistency;
1997 not clear if really needed. */
1998 emit_use (hard_frame_pointer_rtx);
1999 emit_use (stack_pointer_rtx);
2000
2001 /* If the architecture is using a GP register, we must
2002 conservatively assume that the target function makes use of it.
2003 The prologue of functions with nonlocal gotos must therefore
2004 initialize the GP register to the appropriate value, and we
2005 must then make sure that this value is live at the point
2006 of the jump. (Note that this doesn't necessarily apply
2007 to targets with a nonlocal_goto pattern; they are free
2008 to implement it in their own way. Note also that this is
2009 a no-op if the GP register is a global invariant.) */
2010 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
2011 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
2012 emit_use (pic_offset_table_rtx);
2013
2014 emit_indirect_jump (r_label);
2015 }
2016
2017 /* Search backwards to the jump insn and mark it as a
2018 non-local goto. */
2019 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
2020 {
2021 if (JUMP_P (insn))
2022 {
2023 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
2024 break;
2025 }
2026 else if (CALL_P (insn))
2027 break;
2028 }
2029
2030 return const0_rtx;
2031 }
2032
2033 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2034 (not all will be used on all machines) that was passed to __builtin_setjmp.
2035 It updates the stack pointer in that block to the current value. This is
2036 also called directly by the SJLJ exception handling code. */
2037
2038 void
2039 expand_builtin_update_setjmp_buf (rtx buf_addr)
2040 {
2041 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2042 buf_addr = convert_memory_address (Pmode, buf_addr);
2043 rtx stack_save
2044 = gen_rtx_MEM (sa_mode,
2045 memory_address
2046 (sa_mode,
2047 plus_constant (Pmode, buf_addr,
2048 2 * GET_MODE_SIZE (Pmode))));
2049
2050 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2051 }
2052
2053 /* Expand a call to __builtin_prefetch. For a target that does not support
2054 data prefetch, evaluate the memory address argument in case it has side
2055 effects. */
2056
2057 static void
2058 expand_builtin_prefetch (tree exp)
2059 {
2060 tree arg0, arg1, arg2;
2061 int nargs;
2062 rtx op0, op1, op2;
2063
2064 if (!validate_arglist (exp, POINTER_TYPE, 0))
2065 return;
2066
2067 arg0 = CALL_EXPR_ARG (exp, 0);
2068
2069 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2070 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2071 locality). */
2072 nargs = call_expr_nargs (exp);
2073 if (nargs > 1)
2074 arg1 = CALL_EXPR_ARG (exp, 1);
2075 else
2076 arg1 = integer_zero_node;
2077 if (nargs > 2)
2078 arg2 = CALL_EXPR_ARG (exp, 2);
2079 else
2080 arg2 = integer_three_node;
2081
2082 /* Argument 0 is an address. */
2083 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2084
2085 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2086 if (TREE_CODE (arg1) != INTEGER_CST)
2087 {
2088 error ("second argument to %<__builtin_prefetch%> must be a constant");
2089 arg1 = integer_zero_node;
2090 }
2091 op1 = expand_normal (arg1);
2092 /* Argument 1 must be either zero or one. */
2093 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2094 {
2095 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2096 " using zero");
2097 op1 = const0_rtx;
2098 }
2099
2100 /* Argument 2 (locality) must be a compile-time constant int. */
2101 if (TREE_CODE (arg2) != INTEGER_CST)
2102 {
2103 error ("third argument to %<__builtin_prefetch%> must be a constant");
2104 arg2 = integer_zero_node;
2105 }
2106 op2 = expand_normal (arg2);
2107 /* Argument 2 must be 0, 1, 2, or 3. */
2108 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2109 {
2110 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2111 op2 = const0_rtx;
2112 }
2113
2114 if (targetm.have_prefetch ())
2115 {
2116 class expand_operand ops[3];
2117
2118 create_address_operand (&ops[0], op0);
2119 create_integer_operand (&ops[1], INTVAL (op1));
2120 create_integer_operand (&ops[2], INTVAL (op2));
2121 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2122 return;
2123 }
2124
2125 /* Don't do anything with direct references to volatile memory, but
2126 generate code to handle other side effects. */
2127 if (!MEM_P (op0) && side_effects_p (op0))
2128 emit_insn (op0);
2129 }
2130
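/* An illustrative use of the builtin expanded above (a sketch; the array A
   and index I are assumed caller variables).  Both optional arguments must
   be integer constants: the read/write flag 0 or 1, the locality hint 0
   through 3.

     __builtin_prefetch (&a[i + 8]);          read, locality 3 (the defaults)
     __builtin_prefetch (&a[i + 8], 1, 1);    prefetch for write, low locality

   On targets without a prefetch pattern only the side effects of the
   address expression are evaluated, as arranged at the end of the
   function above.  */
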
2131 /* Get a MEM rtx for expression EXP which is the address of an operand
2132 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
2133 the maximum length of the block of memory that might be accessed or
2134 NULL if unknown. */
2135
2136 static rtx
2137 get_memory_rtx (tree exp, tree len)
2138 {
2139 tree orig_exp = exp;
2140 rtx addr, mem;
2141
2142 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
2143 from its inner expression; for expr->a.b only <variable>.a.b is recorded. */
2144 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2145 exp = TREE_OPERAND (exp, 0);
2146
2147 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2148 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2149
2150 /* Get an expression we can use to find the attributes to assign to MEM.
2151 First remove any nops. */
2152 while (CONVERT_EXPR_P (exp)
2153 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2154 exp = TREE_OPERAND (exp, 0);
2155
2156 /* Build a MEM_REF representing the whole accessed area as a byte blob
2157 (as builtin stringops may alias with anything). */
2158 exp = fold_build2 (MEM_REF,
2159 build_array_type (char_type_node,
2160 build_range_type (sizetype,
2161 size_one_node, len)),
2162 exp, build_int_cst (ptr_type_node, 0));
2163
2164 /* If the MEM_REF has no acceptable address, try to get the base object
2165 from the original address we got, and build an all-aliasing
2166 unknown-sized access to that one. */
2167 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2168 set_mem_attributes (mem, exp, 0);
2169 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2170 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2171 0))))
2172 {
2173 exp = build_fold_addr_expr (exp);
2174 exp = fold_build2 (MEM_REF,
2175 build_array_type (char_type_node,
2176 build_range_type (sizetype,
2177 size_zero_node,
2178 NULL)),
2179 exp, build_int_cst (ptr_type_node, 0));
2180 set_mem_attributes (mem, exp, 0);
2181 }
2182 set_mem_alias_set (mem, 0);
2183 return mem;
2184 }
2185 \f
2186 /* Built-in functions to perform an untyped call and return. */
2187
2188 #define apply_args_mode \
2189 (this_target_builtins->x_apply_args_mode)
2190 #define apply_result_mode \
2191 (this_target_builtins->x_apply_result_mode)
2192
2193 /* Return the size required for the block returned by __builtin_apply_args,
2194 and initialize apply_args_mode. */
2195
2196 static int
2197 apply_args_size (void)
2198 {
2199 static int size = -1;
2200 int align;
2201 unsigned int regno;
2202
2203 /* The values computed by this function never change. */
2204 if (size < 0)
2205 {
2206 /* The first value is the incoming arg-pointer. */
2207 size = GET_MODE_SIZE (Pmode);
2208
2209 /* The second value is the structure value address unless this is
2210 passed as an "invisible" first argument. */
2211 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2212 size += GET_MODE_SIZE (Pmode);
2213
2214 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2215 if (FUNCTION_ARG_REGNO_P (regno))
2216 {
2217 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2218
2219 gcc_assert (mode != VOIDmode);
2220
2221 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2222 if (size % align != 0)
2223 size = CEIL (size, align) * align;
2224 size += GET_MODE_SIZE (mode);
2225 apply_args_mode[regno] = mode;
2226 }
2227 else
2228 {
2229 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2230 }
2231 }
2232 return size;
2233 }
2234
2235 /* Return the size required for the block returned by __builtin_apply,
2236 and initialize apply_result_mode. */
2237
2238 static int
2239 apply_result_size (void)
2240 {
2241 static int size = -1;
2242 int align, regno;
2243
2244 /* The values computed by this function never change. */
2245 if (size < 0)
2246 {
2247 size = 0;
2248
2249 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2250 if (targetm.calls.function_value_regno_p (regno))
2251 {
2252 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2253
2254 gcc_assert (mode != VOIDmode);
2255
2256 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2257 if (size % align != 0)
2258 size = CEIL (size, align) * align;
2259 size += GET_MODE_SIZE (mode);
2260 apply_result_mode[regno] = mode;
2261 }
2262 else
2263 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2264
2265 /* Allow targets that use untyped_call and untyped_return to override
2266 the size so that machine-specific information can be stored here. */
2267 #ifdef APPLY_RESULT_SIZE
2268 size = APPLY_RESULT_SIZE;
2269 #endif
2270 }
2271 return size;
2272 }
2273
2274 /* Create a vector describing the result block RESULT. If SAVEP is true,
2275 the result block is used to save the values; otherwise it is used to
2276 restore the values. */
2277
2278 static rtx
2279 result_vector (int savep, rtx result)
2280 {
2281 int regno, size, align, nelts;
2282 fixed_size_mode mode;
2283 rtx reg, mem;
2284 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2285
2286 size = nelts = 0;
2287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2288 if ((mode = apply_result_mode[regno]) != VOIDmode)
2289 {
2290 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2291 if (size % align != 0)
2292 size = CEIL (size, align) * align;
2293 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2294 mem = adjust_address (result, mode, size);
2295 savevec[nelts++] = (savep
2296 ? gen_rtx_SET (mem, reg)
2297 : gen_rtx_SET (reg, mem));
2298 size += GET_MODE_SIZE (mode);
2299 }
2300 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2301 }
2302
2303 /* Save the state required to perform an untyped call with the same
2304 arguments as were passed to the current function. */
2305
2306 static rtx
2307 expand_builtin_apply_args_1 (void)
2308 {
2309 rtx registers, tem;
2310 int size, align, regno;
2311 fixed_size_mode mode;
2312 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2313
2314 /* Create a block where the arg-pointer, structure value address,
2315 and argument registers can be saved. */
2316 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2317
2318 /* Walk past the arg-pointer and structure value address. */
2319 size = GET_MODE_SIZE (Pmode);
2320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2321 size += GET_MODE_SIZE (Pmode);
2322
2323 /* Save each register used in calling a function to the block. */
2324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2325 if ((mode = apply_args_mode[regno]) != VOIDmode)
2326 {
2327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2328 if (size % align != 0)
2329 size = CEIL (size, align) * align;
2330
2331 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2332
2333 emit_move_insn (adjust_address (registers, mode, size), tem);
2334 size += GET_MODE_SIZE (mode);
2335 }
2336
2337 /* Save the arg pointer to the block. */
2338 tem = copy_to_reg (crtl->args.internal_arg_pointer);
2339 /* We need the pointer to the arguments as the caller actually passed
2340 them to us, not as we might have pretended they were passed. Make sure
2341 it's a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
2342 if (STACK_GROWS_DOWNWARD)
2343 tem
2344 = force_operand (plus_constant (Pmode, tem,
2345 crtl->args.pretend_args_size),
2346 NULL_RTX);
2347 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2348
2349 size = GET_MODE_SIZE (Pmode);
2350
2351 /* Save the structure value address unless this is passed as an
2352 "invisible" first argument. */
2353 if (struct_incoming_value)
2354 emit_move_insn (adjust_address (registers, Pmode, size),
2355 copy_to_reg (struct_incoming_value));
2356
2357 /* Return the address of the block. */
2358 return copy_addr_to_reg (XEXP (registers, 0));
2359 }
2360
2361 /* __builtin_apply_args returns a block of memory allocated on
2362 the stack into which are stored the arg pointer, structure
2363 value address, static chain, and all the registers that might
2364 possibly be used in performing a function call. The code is
2365 moved to the start of the function so the incoming values are
2366 saved. */
2367
2368 static rtx
2369 expand_builtin_apply_args (void)
2370 {
2371 /* Don't do __builtin_apply_args more than once in a function.
2372 Save the result of the first call and reuse it. */
2373 if (apply_args_value != 0)
2374 return apply_args_value;
2375 {
2376 /* When this function is called, it means that registers must be
2377 saved on entry to this function. So we migrate the
2378 call to the first insn of this function. */
2379 rtx temp;
2380
2381 start_sequence ();
2382 temp = expand_builtin_apply_args_1 ();
2383 rtx_insn *seq = get_insns ();
2384 end_sequence ();
2385
2386 apply_args_value = temp;
2387
2388 /* Put the insns after the NOTE that starts the function.
2389 If this is inside a start_sequence, make the outer-level insn
2390 chain current, so the code is placed at the start of the
2391 function. If internal_arg_pointer is a non-virtual pseudo,
2392 it needs to be placed after the function that initializes
2393 that pseudo. */
2394 push_topmost_sequence ();
2395 if (REG_P (crtl->args.internal_arg_pointer)
2396 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2397 emit_insn_before (seq, parm_birth_insn);
2398 else
2399 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2400 pop_topmost_sequence ();
2401 return temp;
2402 }
2403 }
2404
2405 /* Perform an untyped call and save the state required to perform an
2406 untyped return of whatever value was returned by the given function. */
2407
2408 static rtx
2409 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2410 {
2411 int size, align, regno;
2412 fixed_size_mode mode;
2413 rtx incoming_args, result, reg, dest, src;
2414 rtx_call_insn *call_insn;
2415 rtx old_stack_level = 0;
2416 rtx call_fusage = 0;
2417 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2418
2419 arguments = convert_memory_address (Pmode, arguments);
2420
2421 /* Create a block where the return registers can be saved. */
2422 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2423
2424 /* Fetch the arg pointer from the ARGUMENTS block. */
2425 incoming_args = gen_reg_rtx (Pmode);
2426 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2427 if (!STACK_GROWS_DOWNWARD)
2428 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2429 incoming_args, 0, OPTAB_LIB_WIDEN);
2430
2431 /* Push a new argument block and copy the arguments. Do not allow
2432 the (potential) memcpy call below to interfere with our stack
2433 manipulations. */
2434 do_pending_stack_adjust ();
2435 NO_DEFER_POP;
2436
2437 /* Save the stack with nonlocal if available. */
2438 if (targetm.have_save_stack_nonlocal ())
2439 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2440 else
2441 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2442
2443 /* Allocate a block of memory onto the stack and copy the memory
2444 arguments to the outgoing arguments address. We can pass TRUE
2445 as the 4th argument because we just saved the stack pointer
2446 and will restore it right after the call. */
2447 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2448
2449 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2450 may have already set current_function_calls_alloca to true.
2451 current_function_calls_alloca won't be set if argsize is zero,
2452 so we have to guarantee need_drap is true here. */
2453 if (SUPPORTS_STACK_ALIGNMENT)
2454 crtl->need_drap = true;
2455
2456 dest = virtual_outgoing_args_rtx;
2457 if (!STACK_GROWS_DOWNWARD)
2458 {
2459 if (CONST_INT_P (argsize))
2460 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2461 else
2462 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2463 }
2464 dest = gen_rtx_MEM (BLKmode, dest);
2465 set_mem_align (dest, PARM_BOUNDARY);
2466 src = gen_rtx_MEM (BLKmode, incoming_args);
2467 set_mem_align (src, PARM_BOUNDARY);
2468 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2469
2470 /* Refer to the argument block. */
2471 apply_args_size ();
2472 arguments = gen_rtx_MEM (BLKmode, arguments);
2473 set_mem_align (arguments, PARM_BOUNDARY);
2474
2475 /* Walk past the arg-pointer and structure value address. */
2476 size = GET_MODE_SIZE (Pmode);
2477 if (struct_value)
2478 size += GET_MODE_SIZE (Pmode);
2479
2480 /* Restore each of the registers previously saved. Make USE insns
2481 for each of these registers for use in making the call. */
2482 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2483 if ((mode = apply_args_mode[regno]) != VOIDmode)
2484 {
2485 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2486 if (size % align != 0)
2487 size = CEIL (size, align) * align;
2488 reg = gen_rtx_REG (mode, regno);
2489 emit_move_insn (reg, adjust_address (arguments, mode, size));
2490 use_reg (&call_fusage, reg);
2491 size += GET_MODE_SIZE (mode);
2492 }
2493
2494 /* Restore the structure value address unless this is passed as an
2495 "invisible" first argument. */
2496 size = GET_MODE_SIZE (Pmode);
2497 if (struct_value)
2498 {
2499 rtx value = gen_reg_rtx (Pmode);
2500 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2501 emit_move_insn (struct_value, value);
2502 if (REG_P (struct_value))
2503 use_reg (&call_fusage, struct_value);
2504 }
2505
2506 /* All arguments and registers used for the call are set up by now! */
2507 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2508
2509 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
2510 needs doing, and we don't want to load it into a register as an
2511 optimization, because prepare_call_address already did that if needed. */
2512 if (GET_CODE (function) != SYMBOL_REF)
2513 function = memory_address (FUNCTION_MODE, function);
2514
2515 /* Generate the actual call instruction and save the return value. */
2516 if (targetm.have_untyped_call ())
2517 {
2518 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2519 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
2520 result_vector (1, result));
2521 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
2522 if (CALL_P (insn))
2523 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
2524 emit_insn (seq);
2525 }
2526 else if (targetm.have_call_value ())
2527 {
2528 rtx valreg = 0;
2529
2530 /* Locate the unique return register. It is not possible to
2531 express a call that sets more than one return register using
2532 call_value; use untyped_call for that. In fact, untyped_call
2533 only needs to save the return registers in the given block. */
2534 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2535 if ((mode = apply_result_mode[regno]) != VOIDmode)
2536 {
2537 gcc_assert (!valreg); /* have_untyped_call required. */
2538
2539 valreg = gen_rtx_REG (mode, regno);
2540 }
2541
2542 emit_insn (targetm.gen_call_value (valreg,
2543 gen_rtx_MEM (FUNCTION_MODE, function),
2544 const0_rtx, NULL_RTX, const0_rtx));
2545
2546 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2547 }
2548 else
2549 gcc_unreachable ();
2550
2551 /* Find the CALL insn we just emitted, and attach the register usage
2552 information. */
2553 call_insn = last_call_insn ();
2554 add_function_usage_to (call_insn, call_fusage);
2555
2556 /* Restore the stack. */
2557 if (targetm.have_save_stack_nonlocal ())
2558 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2559 else
2560 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2561 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2562
2563 OK_DEFER_POP;
2564
2565 /* Return the address of the result block. */
2566 result = copy_addr_to_reg (XEXP (result, 0));
2567 return convert_memory_address (ptr_mode, result);
2568 }
2569
2570 /* Perform an untyped return. */
2571
2572 static void
2573 expand_builtin_return (rtx result)
2574 {
2575 int size, align, regno;
2576 fixed_size_mode mode;
2577 rtx reg;
2578 rtx_insn *call_fusage = 0;
2579
2580 result = convert_memory_address (Pmode, result);
2581
2582 apply_result_size ();
2583 result = gen_rtx_MEM (BLKmode, result);
2584
2585 if (targetm.have_untyped_return ())
2586 {
2587 rtx vector = result_vector (0, result);
2588 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2589 emit_barrier ();
2590 return;
2591 }
2592
2593 /* Restore the return value and note that each value is used. */
2594 size = 0;
2595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2596 if ((mode = apply_result_mode[regno]) != VOIDmode)
2597 {
2598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2599 if (size % align != 0)
2600 size = CEIL (size, align) * align;
2601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2602 emit_move_insn (reg, adjust_address (result, mode, size));
2603
2604 push_to_sequence (call_fusage);
2605 emit_use (reg);
2606 call_fusage = get_insns ();
2607 end_sequence ();
2608 size += GET_MODE_SIZE (mode);
2609 }
2610
2611 /* Put the USE insns before the return. */
2612 emit_insn (call_fusage);
2613
2614 /* Return whatever values were restored by jumping directly to the end
2615 of the function. */
2616 expand_naked_return ();
2617 }
2618
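/* Taken together, the three expanders above implement untyped call
   forwarding.  A hedged usage sketch (TARGET and the 64-byte argument-size
   guess are illustrative assumptions, not values computed here):

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target, args, 64);
     __builtin_return (res);

   __builtin_apply_args snapshots the incoming argument registers,
   __builtin_apply replays them for the callee and saves its return-value
   registers into a block, and __builtin_return restores that block and
   returns.  */
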
2619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2620
2621 static enum type_class
2622 type_to_class (tree type)
2623 {
2624 switch (TREE_CODE (type))
2625 {
2626 case VOID_TYPE: return void_type_class;
2627 case INTEGER_TYPE: return integer_type_class;
2628 case ENUMERAL_TYPE: return enumeral_type_class;
2629 case BOOLEAN_TYPE: return boolean_type_class;
2630 case POINTER_TYPE: return pointer_type_class;
2631 case REFERENCE_TYPE: return reference_type_class;
2632 case OFFSET_TYPE: return offset_type_class;
2633 case REAL_TYPE: return real_type_class;
2634 case COMPLEX_TYPE: return complex_type_class;
2635 case FUNCTION_TYPE: return function_type_class;
2636 case METHOD_TYPE: return method_type_class;
2637 case RECORD_TYPE: return record_type_class;
2638 case UNION_TYPE:
2639 case QUAL_UNION_TYPE: return union_type_class;
2640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2641 ? string_type_class : array_type_class);
2642 case LANG_TYPE: return lang_type_class;
2643 case OPAQUE_TYPE: return opaque_type_class;
2644 default: return no_type_class;
2645 }
2646 }
2647
2648 /* Expand a call EXP to __builtin_classify_type. */
2649
2650 static rtx
2651 expand_builtin_classify_type (tree exp)
2652 {
2653 if (call_expr_nargs (exp))
2654 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2655 return GEN_INT (no_type_class);
2656 }
2657
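/* For example, with the classification above __builtin_classify_type (1)
   evaluates to integer_type_class, __builtin_classify_type (1.0) to
   real_type_class, and __builtin_classify_type ((void *) 0) to
   pointer_type_class.  */
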
2658 /* This helper macro, meant to be used in mathfn_built_in_2 below, determines
2659 which among a set of builtin math functions is appropriate for a given type
2660 mode. The `F' (float) and `L' (long double) are automatically generated
2661 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2662 types, there are additional types that are considered with 'F32', 'F64',
2663 'F128', etc. suffixes. */
2664 #define CASE_MATHFN(MATHFN) \
2665 CASE_CFN_##MATHFN: \
2666 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2667 fcodel = BUILT_IN_##MATHFN##L ; break;
2668 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2669 types. */
2670 #define CASE_MATHFN_FLOATN(MATHFN) \
2671 CASE_CFN_##MATHFN: \
2672 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2673 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2674 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2675 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2676 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2677 break;
2678 /* Similar to above, but appends _R after any F/L suffix. */
2679 #define CASE_MATHFN_REENT(MATHFN) \
2680 case CFN_BUILT_IN_##MATHFN##_R: \
2681 case CFN_BUILT_IN_##MATHFN##F_R: \
2682 case CFN_BUILT_IN_##MATHFN##L_R: \
2683 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2684 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2685
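/* As a concrete illustration, CASE_MATHFN (EXP) expands to

     CASE_CFN_EXP:
       fcode = BUILT_IN_EXP; fcodef = BUILT_IN_EXPF;
       fcodel = BUILT_IN_EXPL; break;

   where CASE_CFN_EXP itself provides the case labels for the combined
   function codes (see case-cfn-macros.h), and CASE_MATHFN_FLOATN
   additionally fills in the _Float<N> and _Float<N>x codes.  */
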
2686 /* Return a function equivalent to FN but operating on floating-point
2687 values of type TYPE, or END_BUILTINS if no such function exists.
2688 This is purely an operation on function codes; it does not guarantee
2689 that the target actually has an implementation of the function. */
2690
2691 static built_in_function
2692 mathfn_built_in_2 (tree type, combined_fn fn)
2693 {
2694 tree mtype;
2695 built_in_function fcode, fcodef, fcodel;
2696 built_in_function fcodef16 = END_BUILTINS;
2697 built_in_function fcodef32 = END_BUILTINS;
2698 built_in_function fcodef64 = END_BUILTINS;
2699 built_in_function fcodef128 = END_BUILTINS;
2700 built_in_function fcodef32x = END_BUILTINS;
2701 built_in_function fcodef64x = END_BUILTINS;
2702 built_in_function fcodef128x = END_BUILTINS;
2703
2704 switch (fn)
2705 {
2706 #define SEQ_OF_CASE_MATHFN \
2707 CASE_MATHFN (ACOS) \
2708 CASE_MATHFN (ACOSH) \
2709 CASE_MATHFN (ASIN) \
2710 CASE_MATHFN (ASINH) \
2711 CASE_MATHFN (ATAN) \
2712 CASE_MATHFN (ATAN2) \
2713 CASE_MATHFN (ATANH) \
2714 CASE_MATHFN (CBRT) \
2715 CASE_MATHFN_FLOATN (CEIL) \
2716 CASE_MATHFN (CEXPI) \
2717 CASE_MATHFN_FLOATN (COPYSIGN) \
2718 CASE_MATHFN (COS) \
2719 CASE_MATHFN (COSH) \
2720 CASE_MATHFN (DREM) \
2721 CASE_MATHFN (ERF) \
2722 CASE_MATHFN (ERFC) \
2723 CASE_MATHFN (EXP) \
2724 CASE_MATHFN (EXP10) \
2725 CASE_MATHFN (EXP2) \
2726 CASE_MATHFN (EXPM1) \
2727 CASE_MATHFN (FABS) \
2728 CASE_MATHFN (FDIM) \
2729 CASE_MATHFN_FLOATN (FLOOR) \
2730 CASE_MATHFN_FLOATN (FMA) \
2731 CASE_MATHFN_FLOATN (FMAX) \
2732 CASE_MATHFN_FLOATN (FMIN) \
2733 CASE_MATHFN (FMOD) \
2734 CASE_MATHFN (FREXP) \
2735 CASE_MATHFN (GAMMA) \
2736 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2737 CASE_MATHFN (HUGE_VAL) \
2738 CASE_MATHFN (HYPOT) \
2739 CASE_MATHFN (ILOGB) \
2740 CASE_MATHFN (ICEIL) \
2741 CASE_MATHFN (IFLOOR) \
2742 CASE_MATHFN (INF) \
2743 CASE_MATHFN (IRINT) \
2744 CASE_MATHFN (IROUND) \
2745 CASE_MATHFN (ISINF) \
2746 CASE_MATHFN (J0) \
2747 CASE_MATHFN (J1) \
2748 CASE_MATHFN (JN) \
2749 CASE_MATHFN (LCEIL) \
2750 CASE_MATHFN (LDEXP) \
2751 CASE_MATHFN (LFLOOR) \
2752 CASE_MATHFN (LGAMMA) \
2753 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2754 CASE_MATHFN (LLCEIL) \
2755 CASE_MATHFN (LLFLOOR) \
2756 CASE_MATHFN (LLRINT) \
2757 CASE_MATHFN (LLROUND) \
2758 CASE_MATHFN (LOG) \
2759 CASE_MATHFN (LOG10) \
2760 CASE_MATHFN (LOG1P) \
2761 CASE_MATHFN (LOG2) \
2762 CASE_MATHFN (LOGB) \
2763 CASE_MATHFN (LRINT) \
2764 CASE_MATHFN (LROUND) \
2765 CASE_MATHFN (MODF) \
2766 CASE_MATHFN (NAN) \
2767 CASE_MATHFN (NANS) \
2768 CASE_MATHFN_FLOATN (NEARBYINT) \
2769 CASE_MATHFN (NEXTAFTER) \
2770 CASE_MATHFN (NEXTTOWARD) \
2771 CASE_MATHFN (POW) \
2772 CASE_MATHFN (POWI) \
2773 CASE_MATHFN (POW10) \
2774 CASE_MATHFN (REMAINDER) \
2775 CASE_MATHFN (REMQUO) \
2776 CASE_MATHFN_FLOATN (RINT) \
2777 CASE_MATHFN_FLOATN (ROUND) \
2778 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2779 CASE_MATHFN (SCALB) \
2780 CASE_MATHFN (SCALBLN) \
2781 CASE_MATHFN (SCALBN) \
2782 CASE_MATHFN (SIGNBIT) \
2783 CASE_MATHFN (SIGNIFICAND) \
2784 CASE_MATHFN (SIN) \
2785 CASE_MATHFN (SINCOS) \
2786 CASE_MATHFN (SINH) \
2787 CASE_MATHFN_FLOATN (SQRT) \
2788 CASE_MATHFN (TAN) \
2789 CASE_MATHFN (TANH) \
2790 CASE_MATHFN (TGAMMA) \
2791 CASE_MATHFN_FLOATN (TRUNC) \
2792 CASE_MATHFN (Y0) \
2793 CASE_MATHFN (Y1) \
2794 CASE_MATHFN (YN)
2795
2796 SEQ_OF_CASE_MATHFN
2797
2798 default:
2799 return END_BUILTINS;
2800 }
2801
2802 mtype = TYPE_MAIN_VARIANT (type);
2803 if (mtype == double_type_node)
2804 return fcode;
2805 else if (mtype == float_type_node)
2806 return fcodef;
2807 else if (mtype == long_double_type_node)
2808 return fcodel;
2809 else if (mtype == float16_type_node)
2810 return fcodef16;
2811 else if (mtype == float32_type_node)
2812 return fcodef32;
2813 else if (mtype == float64_type_node)
2814 return fcodef64;
2815 else if (mtype == float128_type_node)
2816 return fcodef128;
2817 else if (mtype == float32x_type_node)
2818 return fcodef32x;
2819 else if (mtype == float64x_type_node)
2820 return fcodef64x;
2821 else if (mtype == float128x_type_node)
2822 return fcodef128x;
2823 else
2824 return END_BUILTINS;
2825 }
2826
2827 #undef CASE_MATHFN
2828 #undef CASE_MATHFN_FLOATN
2829 #undef CASE_MATHFN_REENT
2830
2831 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2832 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2833 otherwise use the explicit declaration. If we can't do the conversion,
2834 return null. */
2835
2836 static tree
2837 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2838 {
2839 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2840 if (fcode2 == END_BUILTINS)
2841 return NULL_TREE;
2842
2843 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2844 return NULL_TREE;
2845
2846 return builtin_decl_explicit (fcode2);
2847 }
2848
2849 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2850
2851 tree
2852 mathfn_built_in (tree type, combined_fn fn)
2853 {
2854 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2855 }
2856
2857 /* Like mathfn_built_in_1, but take a built_in_function and
2858 always use the implicit builtin declarations. */
2859
2860 tree
2861 mathfn_built_in (tree type, enum built_in_function fn)
2862 {
2863 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2864 }
2865
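/* A small usage sketch (illustrative): a typical caller does

     tree sqrtfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SQRT);
     if (sqrtfn)
       ... build a call to sqrtfn ...

   which yields the sqrt/sqrtf/sqrtl/... declaration matching the type of
   ARG, or NULL_TREE when no implicit declaration is available.  */
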
2866 /* Return the type associated with a built in function, i.e., the one
2867 to be passed to mathfn_built_in to get the type-specific
2868 function. */
2869
2870 tree
2871 mathfn_built_in_type (combined_fn fn)
2872 {
2873 #define CASE_MATHFN(MATHFN) \
2874 case CFN_BUILT_IN_##MATHFN: \
2875 return double_type_node; \
2876 case CFN_BUILT_IN_##MATHFN##F: \
2877 return float_type_node; \
2878 case CFN_BUILT_IN_##MATHFN##L: \
2879 return long_double_type_node;
2880
2881 #define CASE_MATHFN_FLOATN(MATHFN) \
2882 CASE_MATHFN(MATHFN) \
2883 case CFN_BUILT_IN_##MATHFN##F16: \
2884 return float16_type_node; \
2885 case CFN_BUILT_IN_##MATHFN##F32: \
2886 return float32_type_node; \
2887 case CFN_BUILT_IN_##MATHFN##F64: \
2888 return float64_type_node; \
2889 case CFN_BUILT_IN_##MATHFN##F128: \
2890 return float128_type_node; \
2891 case CFN_BUILT_IN_##MATHFN##F32X: \
2892 return float32x_type_node; \
2893 case CFN_BUILT_IN_##MATHFN##F64X: \
2894 return float64x_type_node; \
2895 case CFN_BUILT_IN_##MATHFN##F128X: \
2896 return float128x_type_node;
2897
2898 /* Similar to above, but appends _R after any F/L suffix. */
2899 #define CASE_MATHFN_REENT(MATHFN) \
2900 case CFN_BUILT_IN_##MATHFN##_R: \
2901 return double_type_node; \
2902 case CFN_BUILT_IN_##MATHFN##F_R: \
2903 return float_type_node; \
2904 case CFN_BUILT_IN_##MATHFN##L_R: \
2905 return long_double_type_node;
2906
2907 switch (fn)
2908 {
2909 SEQ_OF_CASE_MATHFN
2910
2911 default:
2912 return NULL_TREE;
2913 }
2914
2915 #undef CASE_MATHFN
2916 #undef CASE_MATHFN_FLOATN
2917 #undef CASE_MATHFN_REENT
2918 #undef SEQ_OF_CASE_MATHFN
2919 }
2920
2921 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2922 return its code, otherwise return IFN_LAST. Note that this function
2923 only tests whether the function is defined in internal-fn.def, not whether
2924 it is actually available on the target. */
2925
2926 internal_fn
2927 associated_internal_fn (tree fndecl)
2928 {
2929 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2930 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2931 switch (DECL_FUNCTION_CODE (fndecl))
2932 {
2933 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2934 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2935 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2936 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2937 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2938 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2939 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2940 #include "internal-fn.def"
2941
2942 CASE_FLT_FN (BUILT_IN_POW10):
2943 return IFN_EXP10;
2944
2945 CASE_FLT_FN (BUILT_IN_DREM):
2946 return IFN_REMAINDER;
2947
2948 CASE_FLT_FN (BUILT_IN_SCALBN):
2949 CASE_FLT_FN (BUILT_IN_SCALBLN):
2950 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2951 return IFN_LDEXP;
2952 return IFN_LAST;
2953
2954 default:
2955 return IFN_LAST;
2956 }
2957 }
2958
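/* For instance, under the mapping above BUILT_IN_SQRT is associated with
   IFN_SQRT, the pow10 family maps to IFN_EXP10, drem to IFN_REMAINDER,
   and scalbn to IFN_LDEXP only when the type's radix is 2.  Whether the
   internal function is actually usable on the target is checked
   separately by replacement_internal_fn below.  */
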
2959 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2960 on the current target by a call to an internal function, return the
2961 code of that internal function, otherwise return IFN_LAST. The caller
2962 is responsible for ensuring that any side-effects of the built-in
2963 call are dealt with correctly. E.g. if CALL sets errno, the caller
2964 must decide that the errno result isn't needed or make it available
2965 in some other way. */
2966
2967 internal_fn
2968 replacement_internal_fn (gcall *call)
2969 {
2970 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2971 {
2972 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2973 if (ifn != IFN_LAST)
2974 {
2975 tree_pair types = direct_internal_fn_types (ifn, call);
2976 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2977 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2978 return ifn;
2979 }
2980 }
2981 return IFN_LAST;
2982 }
2983
2984 /* Expand a call to the builtin trinary math functions (fma).
2985 Return NULL_RTX if a normal call should be emitted rather than expanding the
2986 function in-line. EXP is the expression that is a call to the builtin
2987 function; if convenient, the result should be placed in TARGET.
2988 SUBTARGET may be used as the target for computing one of EXP's
2989 operands. */
2990
2991 static rtx
2992 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2993 {
2994 optab builtin_optab;
2995 rtx op0, op1, op2, result;
2996 rtx_insn *insns;
2997 tree fndecl = get_callee_fndecl (exp);
2998 tree arg0, arg1, arg2;
2999 machine_mode mode;
3000
3001 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3002 return NULL_RTX;
3003
3004 arg0 = CALL_EXPR_ARG (exp, 0);
3005 arg1 = CALL_EXPR_ARG (exp, 1);
3006 arg2 = CALL_EXPR_ARG (exp, 2);
3007
3008 switch (DECL_FUNCTION_CODE (fndecl))
3009 {
3010 CASE_FLT_FN (BUILT_IN_FMA):
3011 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
3012 builtin_optab = fma_optab; break;
3013 default:
3014 gcc_unreachable ();
3015 }
3016
3017 /* Make a suitable register to place result in. */
3018 mode = TYPE_MODE (TREE_TYPE (exp));
3019
3020 /* Before working hard, check whether the instruction is available. */
3021 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3022 return NULL_RTX;
3023
3024 result = gen_reg_rtx (mode);
3025
3026 /* Always stabilize the argument list. */
3027 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
3028 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
3029 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3030
3031 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3032 op1 = expand_normal (arg1);
3033 op2 = expand_normal (arg2);
3034
3035 start_sequence ();
3036
3037 /* Compute into RESULT.
3038 Set RESULT to wherever the result comes back. */
3039 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3040 result, 0);
3041
3042 /* If we were unable to expand via the builtin, stop the sequence
3043 (without outputting the insns) and call the library function
3044 with the stabilized argument list. */
3045 if (result == 0)
3046 {
3047 end_sequence ();
3048 return expand_call (exp, target, target == const0_rtx);
3049 }
3050
3051 /* Output the entire sequence. */
3052 insns = get_insns ();
3053 end_sequence ();
3054 emit_insn (insns);
3055
3056 return result;
3057 }
3058
3059 /* Expand a call to the builtin sin and cos math functions.
3060 Return NULL_RTX if a normal call should be emitted rather than expanding the
3061 function in-line. EXP is the expression that is a call to the builtin
3062 function; if convenient, the result should be placed in TARGET.
3063 SUBTARGET may be used as the target for computing one of EXP's
3064 operands. */
3065
3066 static rtx
3067 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3068 {
3069 optab builtin_optab;
3070 rtx op0;
3071 rtx_insn *insns;
3072 tree fndecl = get_callee_fndecl (exp);
3073 machine_mode mode;
3074 tree arg;
3075
3076 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3077 return NULL_RTX;
3078
3079 arg = CALL_EXPR_ARG (exp, 0);
3080
3081 switch (DECL_FUNCTION_CODE (fndecl))
3082 {
3083 CASE_FLT_FN (BUILT_IN_SIN):
3084 CASE_FLT_FN (BUILT_IN_COS):
3085 builtin_optab = sincos_optab; break;
3086 default:
3087 gcc_unreachable ();
3088 }
3089
3090 /* Make a suitable register to place result in. */
3091 mode = TYPE_MODE (TREE_TYPE (exp));
3092
3093 /* Check if the sincos insn is available, otherwise fall back
3094 to the sin or cos insn. */
3095 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3096 switch (DECL_FUNCTION_CODE (fndecl))
3097 {
3098 CASE_FLT_FN (BUILT_IN_SIN):
3099 builtin_optab = sin_optab; break;
3100 CASE_FLT_FN (BUILT_IN_COS):
3101 builtin_optab = cos_optab; break;
3102 default:
3103 gcc_unreachable ();
3104 }
3105
3106 /* Before working hard, check whether the instruction is available. */
3107 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3108 {
3109 rtx result = gen_reg_rtx (mode);
3110
3111 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3112 need to expand the argument again. This way, we will not perform
3113 side-effects more than once. */
3114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3115
3116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3117
3118 start_sequence ();
3119
3120 /* Compute into RESULT.
3121 Set RESULT to wherever the result comes back. */
3122 if (builtin_optab == sincos_optab)
3123 {
3124 int ok;
3125
3126 switch (DECL_FUNCTION_CODE (fndecl))
3127 {
3128 CASE_FLT_FN (BUILT_IN_SIN):
3129 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3130 break;
3131 CASE_FLT_FN (BUILT_IN_COS):
3132 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3133 break;
3134 default:
3135 gcc_unreachable ();
3136 }
3137 gcc_assert (ok);
3138 }
3139 else
3140 result = expand_unop (mode, builtin_optab, op0, result, 0);
3141
3142 if (result != 0)
3143 {
3144 /* Output the entire sequence. */
3145 insns = get_insns ();
3146 end_sequence ();
3147 emit_insn (insns);
3148 return result;
3149 }
3150
3151 /* If we were unable to expand via the builtin, stop the sequence
3152 (without outputting the insns) and call the library function
3153 with the stabilized argument list. */
3154 end_sequence ();
3155 }
3156
3157 return expand_call (exp, target, target == const0_rtx);
3158 }
3159
3160 /* Given an interclass math builtin decl FNDECL and its argument ARG,
3161 return an RTL instruction code that implements the functionality.
3162 If that isn't possible or available return CODE_FOR_nothing. */
3163
3164 static enum insn_code
3165 interclass_mathfn_icode (tree arg, tree fndecl)
3166 {
3167 bool errno_set = false;
3168 optab builtin_optab = unknown_optab;
3169 machine_mode mode;
3170
3171 switch (DECL_FUNCTION_CODE (fndecl))
3172 {
3173 CASE_FLT_FN (BUILT_IN_ILOGB):
3174 errno_set = true; builtin_optab = ilogb_optab; break;
3175 CASE_FLT_FN (BUILT_IN_ISINF):
3176 builtin_optab = isinf_optab; break;
3177 case BUILT_IN_ISNORMAL:
3178 case BUILT_IN_ISFINITE:
3179 CASE_FLT_FN (BUILT_IN_FINITE):
3180 case BUILT_IN_FINITED32:
3181 case BUILT_IN_FINITED64:
3182 case BUILT_IN_FINITED128:
3183 case BUILT_IN_ISINFD32:
3184 case BUILT_IN_ISINFD64:
3185 case BUILT_IN_ISINFD128:
3186 /* These builtins have no optabs (yet). */
3187 break;
3188 default:
3189 gcc_unreachable ();
3190 }
3191
3192 /* There's no easy way to detect the case we need to set EDOM. */
3193 if (flag_errno_math && errno_set)
3194 return CODE_FOR_nothing;
3195
3196 /* Optab mode depends on the mode of the input argument. */
3197 mode = TYPE_MODE (TREE_TYPE (arg));
3198
3199 if (builtin_optab)
3200 return optab_handler (builtin_optab, mode);
3201 return CODE_FOR_nothing;
3202 }
3203
3204 /* Expand a call to one of the builtin math functions that operate on
3205 floating point argument and output an integer result (ilogb, isinf,
3206 isnan, etc).
3207 Return 0 if a normal call should be emitted rather than expanding the
3208 function in-line. EXP is the expression that is a call to the builtin
3209 function; if convenient, the result should be placed in TARGET. */
3210
3211 static rtx
3212 expand_builtin_interclass_mathfn (tree exp, rtx target)
3213 {
3214 enum insn_code icode = CODE_FOR_nothing;
3215 rtx op0;
3216 tree fndecl = get_callee_fndecl (exp);
3217 machine_mode mode;
3218 tree arg;
3219
3220 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3221 return NULL_RTX;
3222
3223 arg = CALL_EXPR_ARG (exp, 0);
3224 icode = interclass_mathfn_icode (arg, fndecl);
3225 mode = TYPE_MODE (TREE_TYPE (arg));
3226
3227 if (icode != CODE_FOR_nothing)
3228 {
3229 class expand_operand ops[1];
3230 rtx_insn *last = get_last_insn ();
3231 tree orig_arg = arg;
3232
3233 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3234 need to expand the argument again. This way, we will not perform
3235 side-effects more than once. */
3236 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3237
3238 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3239
3240 if (mode != GET_MODE (op0))
3241 op0 = convert_to_mode (mode, op0, 0);
3242
3243 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3244 if (maybe_legitimize_operands (icode, 0, 1, ops)
3245 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3246 return ops[0].value;
3247
3248 delete_insns_since (last);
3249 CALL_EXPR_ARG (exp, 0) = orig_arg;
3250 }
3251
3252 return NULL_RTX;
3253 }
3254
3255 /* Expand a call to the builtin sincos math function.
3256 Return NULL_RTX if a normal call should be emitted rather than expanding the
3257 function in-line. EXP is the expression that is a call to the builtin
3258 function. */
3259
3260 static rtx
3261 expand_builtin_sincos (tree exp)
3262 {
3263 rtx op0, op1, op2, target1, target2;
3264 machine_mode mode;
3265 tree arg, sinp, cosp;
3266 int result;
3267 location_t loc = EXPR_LOCATION (exp);
3268 tree alias_type, alias_off;
3269
3270 if (!validate_arglist (exp, REAL_TYPE,
3271 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3272 return NULL_RTX;
3273
3274 arg = CALL_EXPR_ARG (exp, 0);
3275 sinp = CALL_EXPR_ARG (exp, 1);
3276 cosp = CALL_EXPR_ARG (exp, 2);
3277
3278 /* Make a suitable register to place result in. */
3279 mode = TYPE_MODE (TREE_TYPE (arg));
3280
3281 /* Check if sincos insn is available, otherwise emit the call. */
3282 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3283 return NULL_RTX;
3284
3285 target1 = gen_reg_rtx (mode);
3286 target2 = gen_reg_rtx (mode);
3287
3288 op0 = expand_normal (arg);
3289 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3290 alias_off = build_int_cst (alias_type, 0);
3291 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3292 sinp, alias_off));
3293 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3294 cosp, alias_off));
3295
3296 /* Compute into target1 and target2.
3297 Set TARGET to wherever the result comes back. */
3298 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3299 gcc_assert (result);
3300
3301 /* Move target1 and target2 to the memory locations indicated
3302 by op1 and op2. */
3303 emit_move_insn (op1, target1);
3304 emit_move_insn (op2, target2);
3305
3306 return const0_rtx;
3307 }
3308
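/* A usage sketch for the expansion above (illustrative; sincos is a GNU
   C library extension, so this relies on library support when the optab
   is unavailable):

     double s, c;
     sincos (x, &s, &c);

   When the sincos optab exists for the mode, both results come from a
   single instruction; otherwise the call is emitted as a normal library
   call.  */
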
3309 /* Expand a call to the internal cexpi builtin to the sincos math function.
3310 EXP is the expression that is a call to the builtin function; if convenient,
3311 the result should be placed in TARGET. */
3312
3313 static rtx
3314 expand_builtin_cexpi (tree exp, rtx target)
3315 {
3316 tree fndecl = get_callee_fndecl (exp);
3317 tree arg, type;
3318 machine_mode mode;
3319 rtx op0, op1, op2;
3320 location_t loc = EXPR_LOCATION (exp);
3321
3322 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3323 return NULL_RTX;
3324
3325 arg = CALL_EXPR_ARG (exp, 0);
3326 type = TREE_TYPE (arg);
3327 mode = TYPE_MODE (TREE_TYPE (arg));
3328
3329 /* Try expanding via a sincos optab, falling back to emitting a libcall
3330 to sincos or cexp. We are sure we have sincos or cexp because cexpi
3331 is only generated from sincos or cexp, or when either of them is available. */
3332 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3333 {
3334 op1 = gen_reg_rtx (mode);
3335 op2 = gen_reg_rtx (mode);
3336
3337 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3338
3339 /* Compute into op1 and op2. */
3340 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3341 }
3342 else if (targetm.libc_has_function (function_sincos, type))
3343 {
3344 tree call, fn = NULL_TREE;
3345 tree top1, top2;
3346 rtx op1a, op2a;
3347
3348 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3349 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3350 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3351 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3352 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3353 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3354 else
3355 gcc_unreachable ();
3356
3357 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3358 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3359 op1a = copy_addr_to_reg (XEXP (op1, 0));
3360 op2a = copy_addr_to_reg (XEXP (op2, 0));
3361 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3362 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3363
3364 /* Make sure not to fold the sincos call again. */
3365 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3366 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3367 call, 3, arg, top1, top2));
3368 }
3369 else
3370 {
3371 tree call, fn = NULL_TREE, narg;
3372 tree ctype = build_complex_type (type);
3373
3374 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3375 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3376 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3377 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3378 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3379 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3380 else
3381 gcc_unreachable ();
3382
3383 /* If we don't have a decl for cexp create one. This is the
3384 friendliest fallback if the user calls __builtin_cexpi
3385 without full target C99 function support. */
3386 if (fn == NULL_TREE)
3387 {
3388 tree fntype;
3389 const char *name = NULL;
3390
3391 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3392 name = "cexpf";
3393 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3394 name = "cexp";
3395 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3396 name = "cexpl";
3397
3398 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3399 fn = build_fn_decl (name, fntype);
3400 }
3401
3402 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3403 build_real (type, dconst0), arg);
3404
3405 /* Make sure not to fold the cexp call again. */
3406 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3407 return expand_expr (build_call_nary (ctype, call, 1, narg),
3408 target, VOIDmode, EXPAND_NORMAL);
3409 }
3410
3411 /* Now build the proper return type. */
3412 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3413 make_tree (TREE_TYPE (arg), op2),
3414 make_tree (TREE_TYPE (arg), op1)),
3415 target, VOIDmode, EXPAND_NORMAL);
3416 }
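/* Illustrative sketch, not part of the original source: at the source level
   the lowering above corresponds roughly to rewriting

     _Complex double r = __builtin_cexpi (x);

   either as

     double s, c;
     sincos (x, &s, &c);

   followed by forming the COMPLEX_EXPR c + s*I (when a sincos optab or a
   libc sincos is available), or, on targets with neither, as a call to
   cexp (0.0 + x*I).  The variable names are hypothetical.  */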
3417
3418 /* Conveniently construct a function call expression. FNDECL names the
3419 function to be called, N is the number of arguments, and the "..."
3420 parameters are the argument expressions. Unlike build_call_expr
3421 this doesn't fold the call, hence it always returns a CALL_EXPR. */
3422
3423 static tree
3424 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3425 {
3426 va_list ap;
3427 tree fntype = TREE_TYPE (fndecl);
3428 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3429
3430 va_start (ap, n);
3431 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3432 va_end (ap);
3433 SET_EXPR_LOCATION (fn, loc);
3434 return fn;
3435 }
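/* Usage sketch (illustrative only): the fallback paths later in this file
   use the helper above as in

     tree call = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   which yields a plain CALL_EXPR that subsequent folding will not turn back
   into the very builtin currently being expanded.  */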
3436
3437 /* Expand a call to one of the builtin rounding functions gcc defines
3438 as an extension (lfloor and lceil). As these are gcc extensions we
3439 do not need to worry about setting errno to EDOM.
3440 If expanding via optab fails, lower the expression to (int)(floor(x)).
3441 EXP is the expression that is a call to the builtin function;
3442 if convenient, the result should be placed in TARGET. */
3443
3444 static rtx
3445 expand_builtin_int_roundingfn (tree exp, rtx target)
3446 {
3447 convert_optab builtin_optab;
3448 rtx op0, tmp;
3449 rtx_insn *insns;
3450 tree fndecl = get_callee_fndecl (exp);
3451 enum built_in_function fallback_fn;
3452 tree fallback_fndecl;
3453 machine_mode mode;
3454 tree arg;
3455
3456 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3457 return NULL_RTX;
3458
3459 arg = CALL_EXPR_ARG (exp, 0);
3460
3461 switch (DECL_FUNCTION_CODE (fndecl))
3462 {
3463 CASE_FLT_FN (BUILT_IN_ICEIL):
3464 CASE_FLT_FN (BUILT_IN_LCEIL):
3465 CASE_FLT_FN (BUILT_IN_LLCEIL):
3466 builtin_optab = lceil_optab;
3467 fallback_fn = BUILT_IN_CEIL;
3468 break;
3469
3470 CASE_FLT_FN (BUILT_IN_IFLOOR):
3471 CASE_FLT_FN (BUILT_IN_LFLOOR):
3472 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3473 builtin_optab = lfloor_optab;
3474 fallback_fn = BUILT_IN_FLOOR;
3475 break;
3476
3477 default:
3478 gcc_unreachable ();
3479 }
3480
3481 /* Make a suitable register to place result in. */
3482 mode = TYPE_MODE (TREE_TYPE (exp));
3483
3484 target = gen_reg_rtx (mode);
3485
3486 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3487 need to expand the argument again. This way, we will not perform
3488 side-effects more than once. */
3489 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3490
3491 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3492
3493 start_sequence ();
3494
3495 /* Compute into TARGET. */
3496 if (expand_sfix_optab (target, op0, builtin_optab))
3497 {
3498 /* Output the entire sequence. */
3499 insns = get_insns ();
3500 end_sequence ();
3501 emit_insn (insns);
3502 return target;
3503 }
3504
3505 /* If we were unable to expand via the builtin, stop the sequence
3506 (without outputting the insns). */
3507 end_sequence ();
3508
3509 /* Fall back to floating point rounding optab. */
3510 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3511
3512 /* For non-C99 targets we may end up without a fallback fndecl here
3513 if the user called __builtin_lfloor directly. In this case emit
3514 a call to the floor/ceil variants nevertheless. This should result
3515 in the best user experience for targets without full C99 support. */
3516 if (fallback_fndecl == NULL_TREE)
3517 {
3518 tree fntype;
3519 const char *name = NULL;
3520
3521 switch (DECL_FUNCTION_CODE (fndecl))
3522 {
3523 case BUILT_IN_ICEIL:
3524 case BUILT_IN_LCEIL:
3525 case BUILT_IN_LLCEIL:
3526 name = "ceil";
3527 break;
3528 case BUILT_IN_ICEILF:
3529 case BUILT_IN_LCEILF:
3530 case BUILT_IN_LLCEILF:
3531 name = "ceilf";
3532 break;
3533 case BUILT_IN_ICEILL:
3534 case BUILT_IN_LCEILL:
3535 case BUILT_IN_LLCEILL:
3536 name = "ceill";
3537 break;
3538 case BUILT_IN_IFLOOR:
3539 case BUILT_IN_LFLOOR:
3540 case BUILT_IN_LLFLOOR:
3541 name = "floor";
3542 break;
3543 case BUILT_IN_IFLOORF:
3544 case BUILT_IN_LFLOORF:
3545 case BUILT_IN_LLFLOORF:
3546 name = "floorf";
3547 break;
3548 case BUILT_IN_IFLOORL:
3549 case BUILT_IN_LFLOORL:
3550 case BUILT_IN_LLFLOORL:
3551 name = "floorl";
3552 break;
3553 default:
3554 gcc_unreachable ();
3555 }
3556
3557 fntype = build_function_type_list (TREE_TYPE (arg),
3558 TREE_TYPE (arg), NULL_TREE);
3559 fallback_fndecl = build_fn_decl (name, fntype);
3560 }
3561
3562 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3563
3564 tmp = expand_normal (exp);
3565 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3566
3567 /* Truncate the result of floating point optab to integer
3568 via expand_fix (). */
3569 target = gen_reg_rtx (mode);
3570 expand_fix (target, tmp, 0);
3571
3572 return target;
3573 }
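/* Illustrative example, not part of the original source: on a target without
   an lfloor<mode> pattern,

     long l = __builtin_lfloor (x);

   is lowered by the function above roughly as

     double tmp = floor (x);
     long l = (long) tmp;

   i.e. a call to the floating-point fallback followed by expand_fix, which
   matches the "(int)(floor(x))" lowering described before the function.  */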
3574
3575 /* Expand a call to one of the builtin math functions doing integer
3576 conversion (lrint).
3577 Return 0 if a normal call should be emitted rather than expanding the
3578 function in-line. EXP is the expression that is a call to the builtin
3579 function; if convenient, the result should be placed in TARGET. */
3580
3581 static rtx
3582 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3583 {
3584 convert_optab builtin_optab;
3585 rtx op0;
3586 rtx_insn *insns;
3587 tree fndecl = get_callee_fndecl (exp);
3588 tree arg;
3589 machine_mode mode;
3590 enum built_in_function fallback_fn = BUILT_IN_NONE;
3591
3592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3593 return NULL_RTX;
3594
3595 arg = CALL_EXPR_ARG (exp, 0);
3596
3597 switch (DECL_FUNCTION_CODE (fndecl))
3598 {
3599 CASE_FLT_FN (BUILT_IN_IRINT):
3600 fallback_fn = BUILT_IN_LRINT;
3601 gcc_fallthrough ();
3602 CASE_FLT_FN (BUILT_IN_LRINT):
3603 CASE_FLT_FN (BUILT_IN_LLRINT):
3604 builtin_optab = lrint_optab;
3605 break;
3606
3607 CASE_FLT_FN (BUILT_IN_IROUND):
3608 fallback_fn = BUILT_IN_LROUND;
3609 gcc_fallthrough ();
3610 CASE_FLT_FN (BUILT_IN_LROUND):
3611 CASE_FLT_FN (BUILT_IN_LLROUND):
3612 builtin_optab = lround_optab;
3613 break;
3614
3615 default:
3616 gcc_unreachable ();
3617 }
3618
3619 /* There's no easy way to detect the case we need to set EDOM. */
3620 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3621 return NULL_RTX;
3622
3623 /* Make a suitable register to place result in. */
3624 mode = TYPE_MODE (TREE_TYPE (exp));
3625
3626 /* There's no easy way to detect the case we need to set EDOM. */
3627 if (!flag_errno_math)
3628 {
3629 rtx result = gen_reg_rtx (mode);
3630
3631 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3632 need to expand the argument again. This way, we will not perform
3633 side-effects more than once. */
3634 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3635
3636 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3637
3638 start_sequence ();
3639
3640 if (expand_sfix_optab (result, op0, builtin_optab))
3641 {
3642 /* Output the entire sequence. */
3643 insns = get_insns ();
3644 end_sequence ();
3645 emit_insn (insns);
3646 return result;
3647 }
3648
3649 /* If we were unable to expand via the builtin, stop the sequence
3650 (without outputting the insns) and call to the library function
3651 with the stabilized argument list. */
3652 end_sequence ();
3653 }
3654
3655 if (fallback_fn != BUILT_IN_NONE)
3656 {
3657 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3658 targets, (int) round (x) should never be transformed into
3659 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3660 a call to lround in the hope that the target provides at least some
3661 C99 functions. This should result in the best user experience for
3662 targets without full C99 support. */
3663 tree fallback_fndecl = mathfn_built_in_1
3664 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3665
3666 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3667 fallback_fndecl, 1, arg);
3668
3669 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3670 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3671 return convert_to_mode (mode, target, 0);
3672 }
3673
3674 return expand_call (exp, target, target == const0_rtx);
3675 }
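/* Illustrative example, not part of the original source: for

     int i = __builtin_iround (x);

   on a target without an lround<mode> pattern, the code above falls back to
   a call to lround and converts the result, roughly

     int i = (int) lround (x);

   relying on the target providing at least some C99 math functions.  */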
3676
3677 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3678 a normal call should be emitted rather than expanding the function
3679 in-line. EXP is the expression that is a call to the builtin
3680 function; if convenient, the result should be placed in TARGET. */
3681
3682 static rtx
3683 expand_builtin_powi (tree exp, rtx target)
3684 {
3685 tree arg0, arg1;
3686 rtx op0, op1;
3687 machine_mode mode;
3688 machine_mode mode2;
3689
3690 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3691 return NULL_RTX;
3692
3693 arg0 = CALL_EXPR_ARG (exp, 0);
3694 arg1 = CALL_EXPR_ARG (exp, 1);
3695 mode = TYPE_MODE (TREE_TYPE (exp));
3696
3697 /* Emit a libcall to libgcc. */
3698
3699 /* Mode of the 2nd argument must match that of an int. */
3700 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3701
3702 if (target == NULL_RTX)
3703 target = gen_reg_rtx (mode);
3704
3705 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3706 if (GET_MODE (op0) != mode)
3707 op0 = convert_to_mode (mode, op0, 0);
3708 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3709 if (GET_MODE (op1) != mode2)
3710 op1 = convert_to_mode (mode2, op1, 0);
3711
3712 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3713 target, LCT_CONST, mode,
3714 op0, mode, op1, mode2);
3715
3716 return target;
3717 }
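/* Illustrative example, not part of the original source: __builtin_powi is
   not open-coded here; the function above always emits a libgcc call, so

     double r = __builtin_powi (x, n);

   becomes, conceptually, a call to the powi optab's libfunc (named along
   the lines of __powidf2 for double) with N first converted to the mode
   of an int.  */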
3718
3719 /* Expand expression EXP which is a call to the strlen builtin. Return
3720 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3721 try to get the result in TARGET, if convenient. */
3722
3723 static rtx
3724 expand_builtin_strlen (tree exp, rtx target,
3725 machine_mode target_mode)
3726 {
3727 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3728 return NULL_RTX;
3729
3730 tree src = CALL_EXPR_ARG (exp, 0);
3731 if (!check_read_access (exp, src))
3732 return NULL_RTX;
3733
3734 /* If the length can be computed at compile-time, return it. */
3735 if (tree len = c_strlen (src, 0))
3736 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3737
3738 /* If the length can be computed at compile-time and is constant
3739 integer, but there are side-effects in src, evaluate
3740 src for side-effects, then return len.
3741 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3742 can be optimized into: i++; x = 3; */
3743 tree len = c_strlen (src, 1);
3744 if (len && TREE_CODE (len) == INTEGER_CST)
3745 {
3746 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3747 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3748 }
3749
3750 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3751
3752 /* If SRC is not a pointer type, don't do this operation inline. */
3753 if (align == 0)
3754 return NULL_RTX;
3755
3756 /* Bail out if we can't compute strlen in the right mode. */
3757 machine_mode insn_mode;
3758 enum insn_code icode = CODE_FOR_nothing;
3759 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3760 {
3761 icode = optab_handler (strlen_optab, insn_mode);
3762 if (icode != CODE_FOR_nothing)
3763 break;
3764 }
3765 if (insn_mode == VOIDmode)
3766 return NULL_RTX;
3767
3768 /* Make a place to hold the source address. We will not expand
3769 the actual source until we are sure that the expansion will
3770 not fail -- there are trees that cannot be expanded twice. */
3771 rtx src_reg = gen_reg_rtx (Pmode);
3772
3773 /* Mark the beginning of the strlen sequence so we can emit the
3774 source operand later. */
3775 rtx_insn *before_strlen = get_last_insn ();
3776
3777 class expand_operand ops[4];
3778 create_output_operand (&ops[0], target, insn_mode);
3779 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3780 create_integer_operand (&ops[2], 0);
3781 create_integer_operand (&ops[3], align);
3782 if (!maybe_expand_insn (icode, 4, ops))
3783 return NULL_RTX;
3784
3785 /* Check to see if the argument was declared attribute nonstring
3786 and if so, issue a warning since at this point it's not known
3787 to be nul-terminated. */
3788 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3789
3790 /* Now that we are assured of success, expand the source. */
3791 start_sequence ();
3792 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3793 if (pat != src_reg)
3794 {
3795 #ifdef POINTERS_EXTEND_UNSIGNED
3796 if (GET_MODE (pat) != Pmode)
3797 pat = convert_to_mode (Pmode, pat,
3798 POINTERS_EXTEND_UNSIGNED);
3799 #endif
3800 emit_move_insn (src_reg, pat);
3801 }
3802 pat = get_insns ();
3803 end_sequence ();
3804
3805 if (before_strlen)
3806 emit_insn_after (pat, before_strlen);
3807 else
3808 emit_insn_before (pat, get_insns ());
3809
3810 /* Return the value in the proper mode for this function. */
3811 if (GET_MODE (ops[0].value) == target_mode)
3812 target = ops[0].value;
3813 else if (target != 0)
3814 convert_move (target, ops[0].value, 0);
3815 else
3816 target = convert_to_mode (target_mode, ops[0].value, 0);
3817
3818 return target;
3819 }
3820
3821 /* Expand call EXP to the strnlen built-in, placing the result in TARGET
3822 if convenient and returning it. Return NULL_RTX on failure. */
3823
3824 static rtx
3825 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3826 {
3827 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3828 return NULL_RTX;
3829
3830 tree src = CALL_EXPR_ARG (exp, 0);
3831 tree bound = CALL_EXPR_ARG (exp, 1);
3832
3833 if (!bound)
3834 return NULL_RTX;
3835
3836 check_read_access (exp, src, bound);
3837
3838 location_t loc = UNKNOWN_LOCATION;
3839 if (EXPR_HAS_LOCATION (exp))
3840 loc = EXPR_LOCATION (exp);
3841
3842 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3843 so these conversions aren't necessary. */
3844 c_strlen_data lendata = { };
3845 tree len = c_strlen (src, 0, &lendata, 1);
3846 if (len)
3847 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3848
3849 if (TREE_CODE (bound) == INTEGER_CST)
3850 {
3851 if (!len)
3852 return NULL_RTX;
3853
3854 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3855 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3856 }
3857
3858 if (TREE_CODE (bound) != SSA_NAME)
3859 return NULL_RTX;
3860
3861 wide_int min, max;
3862 value_range r;
3863 get_global_range_query ()->range_of_expr (r, bound);
3864 if (r.kind () != VR_RANGE)
3865 return NULL_RTX;
3866 min = r.lower_bound ();
3867 max = r.upper_bound ();
3868
3869 if (!len || TREE_CODE (len) != INTEGER_CST)
3870 {
3871 bool exact;
3872 lendata.decl = unterminated_array (src, &len, &exact);
3873 if (!lendata.decl)
3874 return NULL_RTX;
3875 }
3876
3877 if (lendata.decl)
3878 return NULL_RTX;
3879
3880 if (wi::gtu_p (min, wi::to_wide (len)))
3881 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3882
3883 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3884 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3885 }
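/* Illustrative example, not part of the original source: with a constant
   bound and a known string length the expansion above reduces

     size_t n = __builtin_strnlen ("hello", 3);

   to the constant MIN (5, 3) == 3 without emitting any call.  */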
3886
3887 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3888 bits from the bytes at DATA + OFFSET and return them reinterpreted as
3889 a target constant. */
3890
3891 static rtx
3892 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3893 scalar_int_mode mode)
3894 {
3895 /* The REPresentation pointed to by DATA need not be a nul-terminated
3896 string but the caller guarantees it's large enough for MODE. */
3897 const char *rep = (const char *) data;
3898
3899 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3900 }
3901
3902 /* LEN specifies the length of the block of a memcpy/memset operation.
3903 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3904 In some cases we can make a very likely guess at the maximum size;
3905 we then set it in PROBABLE_MAX_SIZE. */
3906
3907 static void
3908 determine_block_size (tree len, rtx len_rtx,
3909 unsigned HOST_WIDE_INT *min_size,
3910 unsigned HOST_WIDE_INT *max_size,
3911 unsigned HOST_WIDE_INT *probable_max_size)
3912 {
3913 if (CONST_INT_P (len_rtx))
3914 {
3915 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3916 return;
3917 }
3918 else
3919 {
3920 wide_int min, max;
3921 enum value_range_kind range_type = VR_UNDEFINED;
3922
3923 /* Determine bounds from the type. */
3924 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3925 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3926 else
3927 *min_size = 0;
3928 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3929 *probable_max_size = *max_size
3930 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3931 else
3932 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3933
3934 if (TREE_CODE (len) == SSA_NAME)
3935 {
3936 value_range r;
3937 get_global_range_query ()->range_of_expr (r, len);
3938 range_type = r.kind ();
3939 if (range_type != VR_UNDEFINED)
3940 {
3941 min = wi::to_wide (r.min ());
3942 max = wi::to_wide (r.max ());
3943 }
3944 }
3945 if (range_type == VR_RANGE)
3946 {
3947 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3948 *min_size = min.to_uhwi ();
3949 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3950 *probable_max_size = *max_size = max.to_uhwi ();
3951 }
3952 else if (range_type == VR_ANTI_RANGE)
3953 {
3954 /* Code like
3955
3956 int n;
3957 if (n < 100)
3958 memcpy (a, b, n)
3959
3960 produces an anti-range allowing negative values of N. We can
3961 still use that information and guess that N is not negative.
3962 */
3963 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3964 *probable_max_size = min.to_uhwi () - 1;
3965 }
3966 }
3967 gcc_checking_assert (*max_size <=
3968 (unsigned HOST_WIDE_INT)
3969 GET_MODE_MASK (GET_MODE (len_rtx)));
3970 }
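/* Illustrative example, not part of the original source: in

     void f (char *d, const char *s, unsigned n)
     {
       if (n > 32)
         return;
       __builtin_memcpy (d, s, n);
     }

   assuming earlier range analysis has recorded the range [0, 32] for N at
   the call, the function above sets *MIN_SIZE to 0 and both *MAX_SIZE and
   *PROBABLE_MAX_SIZE to 32, letting the block-move expander pick a small
   inline sequence.  */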
3971
3972 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3973 accessing an object of size SIZE. */
3974
3975 static bool
3976 maybe_warn_for_bound (opt_code opt, location_t loc, tree exp, tree func,
3977 tree bndrng[2], tree size, const access_data *pad = NULL)
3978 {
3979 if (!bndrng[0] || warning_suppressed_p (exp, opt))
3980 return false;
3981
3982 tree maxobjsize = max_object_size ();
3983
3984 bool warned = false;
3985
3986 if (opt == OPT_Wstringop_overread)
3987 {
3988 bool maybe = pad && pad->src.phi ();
3989
3990 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3991 {
3992 if (bndrng[0] == bndrng[1])
3993 warned = (func
3994 ? warning_at (loc, opt,
3995 (maybe
3996 ? G_("%qD specified bound %E may "
3997 "exceed maximum object size %E")
3998 : G_("%qD specified bound %E "
3999 "exceeds maximum object size %E")),
4000 func, bndrng[0], maxobjsize)
4001 : warning_at (loc, opt,
4002 (maybe
4003 ? G_("specified bound %E may "
4004 "exceed maximum object size %E")
4005 : G_("specified bound %E "
4006 "exceeds maximum object size %E")),
4007 bndrng[0], maxobjsize));
4008 else
4009 warned = (func
4010 ? warning_at (loc, opt,
4011 (maybe
4012 ? G_("%qD specified bound [%E, %E] may "
4013 "exceed maximum object size %E")
4014 : G_("%qD specified bound [%E, %E] "
4015 "exceeds maximum object size %E")),
4016 func, bndrng[0], bndrng[1], maxobjsize)
4017 : warning_at (loc, opt,
4018 (maybe
4019 ? G_("specified bound [%E, %E] may "
4020 "exceed maximum object size %E")
4021 : G_("specified bound [%E, %E] "
4022 "exceeds maximum object size %E")),
4023 bndrng[0], bndrng[1], maxobjsize));
4024 }
4025 else if (!size || tree_int_cst_le (bndrng[0], size))
4026 return false;
4027 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4028 warned = (func
4029 ? warning_at (loc, opt,
4030 (maybe
4031 ? G_("%qD specified bound %E may exceed "
4032 "source size %E")
4033 : G_("%qD specified bound %E exceeds "
4034 "source size %E")),
4035 func, bndrng[0], size)
4036 : warning_at (loc, opt,
4037 (maybe
4038 ? G_("specified bound %E may exceed "
4039 "source size %E")
4040 : G_("specified bound %E exceeds "
4041 "source size %E")),
4042 bndrng[0], size));
4043 else
4044 warned = (func
4045 ? warning_at (loc, opt,
4046 (maybe
4047 ? G_("%qD specified bound [%E, %E] may "
4048 "exceed source size %E")
4049 : G_("%qD specified bound [%E, %E] exceeds "
4050 "source size %E")),
4051 func, bndrng[0], bndrng[1], size)
4052 : warning_at (loc, opt,
4053 (maybe
4054 ? G_("specified bound [%E, %E] may exceed "
4055 "source size %E")
4056 : G_("specified bound [%E, %E] exceeds "
4057 "source size %E")),
4058 bndrng[0], bndrng[1], size));
4059 if (warned)
4060 {
4061 if (pad && pad->src.ref)
4062 {
4063 if (DECL_P (pad->src.ref))
4064 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4065 "source object declared here");
4066 else if (EXPR_HAS_LOCATION (pad->src.ref))
4067 inform (EXPR_LOCATION (pad->src.ref),
4068 "source object allocated here");
4069 }
4070 suppress_warning (exp, opt);
4071 }
4072
4073 return warned;
4074 }
4075
4076 bool maybe = pad && pad->dst.phi ();
4077 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4078 {
4079 if (bndrng[0] == bndrng[1])
4080 warned = (func
4081 ? warning_at (loc, opt,
4082 (maybe
4083 ? G_("%qD specified size %E may "
4084 "exceed maximum object size %E")
4085 : G_("%qD specified size %E "
4086 "exceeds maximum object size %E")),
4087 func, bndrng[0], maxobjsize)
4088 : warning_at (loc, opt,
4089 (maybe
4090 ? G_("specified size %E may exceed "
4091 "maximum object size %E")
4092 : G_("specified size %E exceeds "
4093 "maximum object size %E")),
4094 bndrng[0], maxobjsize));
4095 else
4096 warned = (func
4097 ? warning_at (loc, opt,
4098 (maybe
4099 ? G_("%qD specified size between %E and %E "
4100 "may exceed maximum object size %E")
4101 : G_("%qD specified size between %E and %E "
4102 "exceeds maximum object size %E")),
4103 func, bndrng[0], bndrng[1], maxobjsize)
4104 : warning_at (loc, opt,
4105 (maybe
4106 ? G_("specified size between %E and %E "
4107 "may exceed maximum object size %E")
4108 : G_("specified size between %E and %E "
4109 "exceeds maximum object size %E")),
4110 bndrng[0], bndrng[1], maxobjsize));
4111 }
4112 else if (!size || tree_int_cst_le (bndrng[0], size))
4113 return false;
4114 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4115 warned = (func
4116 ? warning_at (loc, opt,
4117 (maybe
4118 ? G_("%qD specified bound %E may exceed "
4119 "destination size %E")
4120 : G_("%qD specified bound %E exceeds "
4121 "destination size %E")),
4122 func, bndrng[0], size)
4123 : warning_at (loc, opt,
4124 (maybe
4125 ? G_("specified bound %E may exceed "
4126 "destination size %E")
4127 : G_("specified bound %E exceeds "
4128 "destination size %E")),
4129 bndrng[0], size));
4130 else
4131 warned = (func
4132 ? warning_at (loc, opt,
4133 (maybe
4134 ? G_("%qD specified bound [%E, %E] may exceed "
4135 "destination size %E")
4136 : G_("%qD specified bound [%E, %E] exceeds "
4137 "destination size %E")),
4138 func, bndrng[0], bndrng[1], size)
4139 : warning_at (loc, opt,
4140 (maybe
4141 ? G_("specified bound [%E, %E] exceeds "
4142 "destination size %E")
4143 : G_("specified bound [%E, %E] exceeds "
4144 "destination size %E")),
4145 bndrng[0], bndrng[1], size));
4146
4147 if (warned)
4148 {
4149 if (pad && pad->dst.ref)
4150 {
4151 if (DECL_P (pad->dst.ref))
4152 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4153 "destination object declared here");
4154 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4155 inform (EXPR_LOCATION (pad->dst.ref),
4156 "destination object allocated here");
4157 }
4158 suppress_warning (exp, opt);
4159 }
4160
4161 return warned;
4162 }
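/* Illustrative example, not part of the original source (wording
   approximate): for

     char a[4];
     ... __builtin_strnlen (a, 8) ...

   the bound 8 exceeds the size of A, so the function above may issue
   "specified bound 8 exceeds source size 4" under -Wstringop-overread,
   followed by a note pointing at the declaration of A.  */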
4163
4164 /* For an expression EXP issue an access warning controlled by option OPT
4165 for an access of a size in RANGE to a region of SIZE bytes.
4166 WRITE is true for a write access, READ for a read access, and neither
4167 for a call that may or may not perform an access but for which the
4168 range is expected to be valid.
4169 Returns true when a warning has been issued. */
4170
4171 static bool
4172 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4173 tree size, bool write, bool read, bool maybe)
4174 {
4175 bool warned = false;
4176
4177 if (write && read)
4178 {
4179 if (tree_int_cst_equal (range[0], range[1]))
4180 warned = (func
4181 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4182 (maybe
4183 ? G_("%qD may access %E byte in a region "
4184 "of size %E")
4185 : G_("%qD accessing %E byte in a region "
4186 "of size %E")),
4187 (maybe
4188 ? G_ ("%qD may access %E bytes in a region "
4189 "of size %E")
4190 : G_ ("%qD accessing %E bytes in a region "
4191 "of size %E")),
4192 func, range[0], size)
4193 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4194 (maybe
4195 ? G_("may access %E byte in a region "
4196 "of size %E")
4197 : G_("accessing %E byte in a region "
4198 "of size %E")),
4199 (maybe
4200 ? G_("may access %E bytes in a region "
4201 "of size %E")
4202 : G_("accessing %E bytes in a region "
4203 "of size %E")),
4204 range[0], size));
4205 else if (tree_int_cst_sign_bit (range[1]))
4206 {
4207 /* Avoid printing the upper bound if it's invalid. */
4208 warned = (func
4209 ? warning_at (loc, opt,
4210 (maybe
4211 ? G_("%qD may access %E or more bytes "
4212 "in a region of size %E")
4213 : G_("%qD accessing %E or more bytes "
4214 "in a region of size %E")),
4215 func, range[0], size)
4216 : warning_at (loc, opt,
4217 (maybe
4218 ? G_("may access %E or more bytes "
4219 "in a region of size %E")
4220 : G_("accessing %E or more bytes "
4221 "in a region of size %E")),
4222 range[0], size));
4223 }
4224 else
4225 warned = (func
4226 ? warning_at (loc, opt,
4227 (maybe
4228 ? G_("%qD may access between %E and %E "
4229 "bytes in a region of size %E")
4230 : G_("%qD accessing between %E and %E "
4231 "bytes in a region of size %E")),
4232 func, range[0], range[1], size)
4233 : warning_at (loc, opt,
4234 (maybe
4235 ? G_("may access between %E and %E bytes "
4236 "in a region of size %E")
4237 : G_("accessing between %E and %E bytes "
4238 "in a region of size %E")),
4239 range[0], range[1], size));
4240 return warned;
4241 }
4242
4243 if (write)
4244 {
4245 if (tree_int_cst_equal (range[0], range[1]))
4246 warned = (func
4247 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4248 (maybe
4249 ? G_("%qD may write %E byte into a region "
4250 "of size %E")
4251 : G_("%qD writing %E byte into a region "
4252 "of size %E overflows the destination")),
4253 (maybe
4254 ? G_("%qD may write %E bytes into a region "
4255 "of size %E")
4256 : G_("%qD writing %E bytes into a region "
4257 "of size %E overflows the destination")),
4258 func, range[0], size)
4259 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4260 (maybe
4261 ? G_("may write %E byte into a region "
4262 "of size %E")
4263 : G_("writing %E byte into a region "
4264 "of size %E overflows the destination")),
4265 (maybe
4266 ? G_("may write %E bytes into a region "
4267 "of size %E")
4268 : G_("writing %E bytes into a region "
4269 "of size %E overflows the destination")),
4270 range[0], size));
4271 else if (tree_int_cst_sign_bit (range[1]))
4272 {
4273 /* Avoid printing the upper bound if it's invalid. */
4274 warned = (func
4275 ? warning_at (loc, opt,
4276 (maybe
4277 ? G_("%qD may write %E or more bytes "
4278 "into a region of size %E")
4279 : G_("%qD writing %E or more bytes "
4280 "into a region of size %E overflows "
4281 "the destination")),
4282 func, range[0], size)
4283 : warning_at (loc, opt,
4284 (maybe
4285 ? G_("may write %E or more bytes into "
4286 "a region of size %E")
4287 : G_("writing %E or more bytes into "
4288 "a region of size %E overflows "
4289 "the destination")),
4290 range[0], size));
4291 }
4292 else
4293 warned = (func
4294 ? warning_at (loc, opt,
4295 (maybe
4296 ? G_("%qD may write between %E and %E bytes "
4297 "into a region of size %E")
4298 : G_("%qD writing between %E and %E bytes "
4299 "into a region of size %E overflows "
4300 "the destination")),
4301 func, range[0], range[1], size)
4302 : warning_at (loc, opt,
4303 (maybe
4304 ? G_("may write between %E and %E bytes "
4305 "into a region of size %E")
4306 : G_("writing between %E and %E bytes "
4307 "into a region of size %E overflows "
4308 "the destination")),
4309 range[0], range[1], size));
4310 return warned;
4311 }
4312
4313 if (read)
4314 {
4315 if (tree_int_cst_equal (range[0], range[1]))
4316 warned = (func
4317 ? warning_n (loc, OPT_Wstringop_overread,
4318 tree_to_uhwi (range[0]),
4319 (maybe
4320 ? G_("%qD may read %E byte from a region "
4321 "of size %E")
4322 : G_("%qD reading %E byte from a region "
4323 "of size %E")),
4324 (maybe
4325 ? G_("%qD may read %E bytes from a region "
4326 "of size %E")
4327 : G_("%qD reading %E bytes from a region "
4328 "of size %E")),
4329 func, range[0], size)
4330 : warning_n (loc, OPT_Wstringop_overread,
4331 tree_to_uhwi (range[0]),
4332 (maybe
4333 ? G_("may read %E byte from a region "
4334 "of size %E")
4335 : G_("reading %E byte from a region "
4336 "of size %E")),
4337 (maybe
4338 ? G_("may read %E bytes from a region "
4339 "of size %E")
4340 : G_("reading %E bytes from a region "
4341 "of size %E")),
4342 range[0], size));
4343 else if (tree_int_cst_sign_bit (range[1]))
4344 {
4345 /* Avoid printing the upper bound if it's invalid. */
4346 warned = (func
4347 ? warning_at (loc, OPT_Wstringop_overread,
4348 (maybe
4349 ? G_("%qD may read %E or more bytes "
4350 "from a region of size %E")
4351 : G_("%qD reading %E or more bytes "
4352 "from a region of size %E")),
4353 func, range[0], size)
4354 : warning_at (loc, OPT_Wstringop_overread,
4355 (maybe
4356 ? G_("may read %E or more bytes "
4357 "from a region of size %E")
4358 : G_("reading %E or more bytes "
4359 "from a region of size %E")),
4360 range[0], size));
4361 }
4362 else
4363 warned = (func
4364 ? warning_at (loc, OPT_Wstringop_overread,
4365 (maybe
4366 ? G_("%qD may read between %E and %E bytes "
4367 "from a region of size %E")
4368 : G_("%qD reading between %E and %E bytes "
4369 "from a region of size %E")),
4370 func, range[0], range[1], size)
4371 : warning_at (loc, opt,
4372 (maybe
4373 ? G_("may read between %E and %E bytes "
4374 "from a region of size %E")
4375 : G_("reading between %E and %E bytes "
4376 "from a region of size %E")),
4377 range[0], range[1], size));
4378
4379 if (warned)
4380 suppress_warning (exp, OPT_Wstringop_overread);
4381
4382 return warned;
4383 }
4384
4385 if (tree_int_cst_equal (range[0], range[1])
4386 || tree_int_cst_sign_bit (range[1]))
4387 warned = (func
4388 ? warning_n (loc, OPT_Wstringop_overread,
4389 tree_to_uhwi (range[0]),
4390 "%qD expecting %E byte in a region of size %E",
4391 "%qD expecting %E bytes in a region of size %E",
4392 func, range[0], size)
4393 : warning_n (loc, OPT_Wstringop_overread,
4394 tree_to_uhwi (range[0]),
4395 "expecting %E byte in a region of size %E",
4396 "expecting %E bytes in a region of size %E",
4397 range[0], size));
4398 else if (tree_int_cst_sign_bit (range[1]))
4399 {
4400 /* Avoid printing the upper bound if it's invalid. */
4401 warned = (func
4402 ? warning_at (loc, OPT_Wstringop_overread,
4403 "%qD expecting %E or more bytes in a region "
4404 "of size %E",
4405 func, range[0], size)
4406 : warning_at (loc, OPT_Wstringop_overread,
4407 "expecting %E or more bytes in a region "
4408 "of size %E",
4409 range[0], size));
4410 }
4411 else
4412 warned = (func
4413 ? warning_at (loc, OPT_Wstringop_overread,
4414 "%qD expecting between %E and %E bytes in "
4415 "a region of size %E",
4416 func, range[0], range[1], size)
4417 : warning_at (loc, OPT_Wstringop_overread,
4418 "expecting between %E and %E bytes in "
4419 "a region of size %E",
4420 range[0], range[1], size));
4421
4422 if (warned)
4423 suppress_warning (exp, OPT_Wstringop_overread);
4424
4425 return warned;
4426 }
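/* Illustrative example, not part of the original source (wording
   approximate): for

     char d[3];
     __builtin_memset (d, 0, 5);

   the write branch above diagnoses something along the lines of
   "writing 5 bytes into a region of size 3 overflows the destination".  */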
4427
4428 /* Issue one inform message describing each target of an access REF.
4429 MODE indicates whether the access is a write, a read, or both. */
4430
4431 void
4432 access_ref::inform_access (access_mode mode) const
4433 {
4434 const access_ref &aref = *this;
4435 if (!aref.ref)
4436 return;
4437
4438 if (aref.phi ())
4439 {
4440 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4441 with data for all objects referenced by the PHI arguments. */
4442 access_ref maxref;
4443 auto_vec<access_ref> all_refs;
4444 if (!get_ref (&all_refs, &maxref))
4445 return;
4446
4447 /* Except for MAXREF, the rest of the arguments' offsets need not
4448 reflect one added to the PHI itself. Determine the latter from
4449 MAXREF on which the result is based. */
4450 const offset_int orng[] =
4451 {
4452 offrng[0] - maxref.offrng[0],
4453 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4454 };
4455
4456 /* Add the final PHI's offset to that of each of the arguments
4457 and recurse to issue an inform message for it. */
4458 for (unsigned i = 0; i != all_refs.length (); ++i)
4459 {
4460 /* Skip any PHIs; those could lead to infinite recursion. */
4461 if (all_refs[i].phi ())
4462 continue;
4463
4464 all_refs[i].add_offset (orng[0], orng[1]);
4465 all_refs[i].inform_access (mode);
4466 }
4467 return;
4468 }
4469
4470 /* Convert offset range and avoid including a zero range since it
4471 isn't necessarily meaningful. */
4472 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4473 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4474 HOST_WIDE_INT minoff;
4475 HOST_WIDE_INT maxoff = diff_max;
4476 if (wi::fits_shwi_p (aref.offrng[0]))
4477 minoff = aref.offrng[0].to_shwi ();
4478 else
4479 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4480
4481 if (wi::fits_shwi_p (aref.offrng[1]))
4482 maxoff = aref.offrng[1].to_shwi ();
4483
4484 if (maxoff <= diff_min || maxoff >= diff_max)
4485 /* Avoid mentioning an upper bound that's equal to or in excess
4486 of the maximum of ptrdiff_t. */
4487 maxoff = minoff;
4488
4489 /* Convert size range and always include it since all sizes are
4490 meaningful. */
4491 unsigned long long minsize = 0, maxsize = 0;
4492 if (wi::fits_shwi_p (aref.sizrng[0])
4493 && wi::fits_shwi_p (aref.sizrng[1]))
4494 {
4495 minsize = aref.sizrng[0].to_shwi ();
4496 maxsize = aref.sizrng[1].to_shwi ();
4497 }
4498
4499 /* SIZRNG doesn't necessarily have the same range as the allocation
4500 size determined by gimple_call_alloc_size (). */
4501 char sizestr[80];
4502 if (minsize == maxsize)
4503 sprintf (sizestr, "%llu", minsize);
4504 else
4505 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4506
4507 char offstr[80];
4508 if (minoff == 0
4509 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4510 offstr[0] = '\0';
4511 else if (minoff == maxoff)
4512 sprintf (offstr, "%lli", (long long) minoff);
4513 else
4514 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4515
4516 location_t loc = UNKNOWN_LOCATION;
4517
4518 tree ref = this->ref;
4519 tree allocfn = NULL_TREE;
4520 if (TREE_CODE (ref) == SSA_NAME)
4521 {
4522 gimple *stmt = SSA_NAME_DEF_STMT (ref);
4523 if (is_gimple_call (stmt))
4524 {
4525 loc = gimple_location (stmt);
4526 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4527 {
4528 /* Strip the SSA_NAME suffix from the variable name and
4529 recreate an identifier with the VLA's original name. */
4530 ref = gimple_call_lhs (stmt);
4531 if (SSA_NAME_IDENTIFIER (ref))
4532 {
4533 ref = SSA_NAME_IDENTIFIER (ref);
4534 const char *id = IDENTIFIER_POINTER (ref);
4535 size_t len = strcspn (id, ".$");
4536 if (!len)
4537 len = strlen (id);
4538 ref = get_identifier_with_length (id, len);
4539 }
4540 }
4541 else
4542 {
4543 /* Except for VLAs, retrieve the allocation function. */
4544 allocfn = gimple_call_fndecl (stmt);
4545 if (!allocfn)
4546 allocfn = gimple_call_fn (stmt);
4547 if (TREE_CODE (allocfn) == SSA_NAME)
4548 {
4549 /* For an ALLOC_CALL via a function pointer make a small
4550 effort to determine the destination of the pointer. */
4551 gimple *def = SSA_NAME_DEF_STMT (allocfn);
4552 if (gimple_assign_single_p (def))
4553 {
4554 tree rhs = gimple_assign_rhs1 (def);
4555 if (DECL_P (rhs))
4556 allocfn = rhs;
4557 else if (TREE_CODE (rhs) == COMPONENT_REF)
4558 allocfn = TREE_OPERAND (rhs, 1);
4559 }
4560 }
4561 }
4562 }
4563 else if (gimple_nop_p (stmt))
4564 /* Handle DECL_PARM below. */
4565 ref = SSA_NAME_VAR (ref);
4566 }
4567
4568 if (DECL_P (ref))
4569 loc = DECL_SOURCE_LOCATION (ref);
4570 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4571 loc = EXPR_LOCATION (ref);
4572 else if (TREE_CODE (ref) != IDENTIFIER_NODE
4573 && TREE_CODE (ref) != SSA_NAME)
4574 return;
4575
4576 if (mode == access_read_write || mode == access_write_only)
4577 {
4578 if (allocfn == NULL_TREE)
4579 {
4580 if (*offstr)
4581 inform (loc, "at offset %s into destination object %qE of size %s",
4582 offstr, ref, sizestr);
4583 else
4584 inform (loc, "destination object %qE of size %s", ref, sizestr);
4585 return;
4586 }
4587
4588 if (*offstr)
4589 inform (loc,
4590 "at offset %s into destination object of size %s "
4591 "allocated by %qE", offstr, sizestr, allocfn);
4592 else
4593 inform (loc, "destination object of size %s allocated by %qE",
4594 sizestr, allocfn);
4595 return;
4596 }
4597
4598 if (mode == access_read_only)
4599 {
4600 if (allocfn == NULL_TREE)
4601 {
4602 if (*offstr)
4603 inform (loc, "at offset %s into source object %qE of size %s",
4604 offstr, ref, sizestr);
4605 else
4606 inform (loc, "source object %qE of size %s", ref, sizestr);
4607
4608 return;
4609 }
4610
4611 if (*offstr)
4612 inform (loc,
4613 "at offset %s into source object of size %s allocated by %qE",
4614 offstr, sizestr, allocfn);
4615 else
4616 inform (loc, "source object of size %s allocated by %qE",
4617 sizestr, allocfn);
4618 return;
4619 }
4620
4621 if (allocfn == NULL_TREE)
4622 {
4623 if (*offstr)
4624 inform (loc, "at offset %s into object %qE of size %s",
4625 offstr, ref, sizestr);
4626 else
4627 inform (loc, "object %qE of size %s", ref, sizestr);
4628
4629 return;
4630 }
4631
4632 if (*offstr)
4633 inform (loc,
4634 "at offset %s into object of size %s allocated by %qE",
4635 offstr, sizestr, allocfn);
4636 else
4637 inform (loc, "object of size %s allocated by %qE",
4638 sizestr, allocfn);
4639 }
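/* Illustrative example, not part of the original source (wording
   approximate): for

     char *p = (char *) __builtin_malloc (8);
     __builtin_memset (p + 6, 0, 4);

   the overflow warning may be followed by a note from the function above
   along the lines of "at offset 6 into destination object of size 8
   allocated by malloc".  */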
4640
4641 /* Helper to set RANGE to the range of BOUND if BOUND is nonnull,
4642 bounded by BNDRNG when that is nonnull and valid. */
4643
4644 static void
4645 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4646 {
4647 if (bound)
4648 get_size_range (bound, range);
4649
4650 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4651 return;
4652
4653 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4654 {
4655 offset_int r[] =
4656 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4657 if (r[0] < bndrng[0])
4658 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4659 if (bndrng[1] < r[1])
4660 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4661 }
4662 else
4663 {
4664 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4665 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4666 }
4667 }
4668
4669 /* Try to verify that the sizes and lengths of the arguments to a string
4670 manipulation function given by EXP are within valid bounds and that
4671 the operation does not lead to buffer overflow or read past the end.
4672 Arguments other than EXP may be null. When non-null, the arguments
4673 have the following meaning:
4674 DST is the destination of a copy call or NULL otherwise.
4675 SRC is the source of a copy call or NULL otherwise.
4676 DSTWRITE is the number of bytes written into the destination obtained
4677 from the user-supplied size argument to the function (such as in
4678 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4679 MAXREAD is the user-supplied bound on the length of the source sequence
4680 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4681 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4682 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4683 expression EXP is a string function call (as opposed to a memory call
4684 like memcpy). As an exception, SRCSTR can also be an integer denoting
4685 the precomputed size of the source string or object (for functions like
4686 memcpy).
4687 DSTSIZE is the size of the destination object.
4688
4689 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
4690 SIZE_MAX.
4691
4692 WRITE is true for write accesses, READ is true for reads. Both are
4693 false for simple size checks in calls to functions that neither read
4694 from nor write to the region.
4695
4696 When nonnull, PAD points to a more detailed description of the access.
4697
4698 If the call is successfully verified as safe return true, otherwise
4699 return false. */
4700
4701 bool
4702 check_access (tree exp, tree dstwrite,
4703 tree maxread, tree srcstr, tree dstsize,
4704 access_mode mode, const access_data *pad /* = NULL */)
4705 {
4706 /* The size of the largest object is half the address space, or
4707 PTRDIFF_MAX. (This is way too permissive.) */
4708 tree maxobjsize = max_object_size ();
4709
4710 /* Either the approximate/minimum length of the source string for
4711 string functions or the size of the source object for raw memory
4712 functions. */
4713 tree slen = NULL_TREE;
4714
4715 /* The range of the access in bytes; first set to the write access
4716 for functions that write and then read for those that also (or
4717 just) read. */
4718 tree range[2] = { NULL_TREE, NULL_TREE };
4719
4720 /* Set to true when the exact number of bytes written by a string
4721 function like strcpy is not known and the only thing that is
4722 known is that it must be at least one (for the terminating nul). */
4723 bool at_least_one = false;
4724 if (srcstr)
4725 {
4726 /* SRCSTR is normally a pointer to string but as a special case
4727 it can be an integer denoting the length of a string. */
4728 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4729 {
4730 if (!check_nul_terminated_array (exp, srcstr, maxread))
4731 return false;
4732 /* Try to determine the range of lengths the source string
4733 refers to. If it can be determined and is less than
4734 the upper bound given by MAXREAD add one to it for
4735 the terminating nul. Otherwise, set it to one for
4736 the same reason, or to MAXREAD as appropriate. */
4737 c_strlen_data lendata = { };
4738 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4739 range[0] = lendata.minlen;
4740 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4741 if (range[0]
4742 && TREE_CODE (range[0]) == INTEGER_CST
4743 && TREE_CODE (range[1]) == INTEGER_CST
4744 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4745 {
4746 if (maxread && tree_int_cst_le (maxread, range[0]))
4747 range[0] = range[1] = maxread;
4748 else
4749 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4750 range[0], size_one_node);
4751
4752 if (maxread && tree_int_cst_le (maxread, range[1]))
4753 range[1] = maxread;
4754 else if (!integer_all_onesp (range[1]))
4755 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4756 range[1], size_one_node);
4757
4758 slen = range[0];
4759 }
4760 else
4761 {
4762 at_least_one = true;
4763 slen = size_one_node;
4764 }
4765 }
4766 else
4767 slen = srcstr;
4768 }
4769
4770 if (!dstwrite && !maxread)
4771 {
4772 /* When the only available piece of data is the object size
4773 there is nothing to do. */
4774 if (!slen)
4775 return true;
4776
4777 /* Otherwise, when the length of the source sequence is known
4778 (as with strlen), set DSTWRITE to it. */
4779 if (!range[0])
4780 dstwrite = slen;
4781 }
4782
4783 if (!dstsize)
4784 dstsize = maxobjsize;
4785
4786 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4787 if valid. */
4788 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4789
4790 tree func = get_callee_fndecl (exp);
4791 /* Read vs write access by built-ins can be determined from the const
4792 qualifiers on the pointer argument. In the absence of attribute
4793 access, non-const qualified pointer arguments to user-defined
4794 functions are assumed to both read and write the objects. */
4795 const bool builtin = func ? fndecl_built_in_p (func) : false;
4796
4797 /* First check the number of bytes to be written against the maximum
4798 object size. */
4799 if (range[0]
4800 && TREE_CODE (range[0]) == INTEGER_CST
4801 && tree_int_cst_lt (maxobjsize, range[0]))
4802 {
4803 location_t loc = EXPR_LOCATION (exp);
4804 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4805 NULL_TREE, pad);
4806 return false;
4807 }
4808
4809 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4810 constant, and in range of unsigned HOST_WIDE_INT. */
4811 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4812
4813 /* Next check the number of bytes to be written against the destination
4814 object size. */
4815 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4816 {
4817 if (range[0]
4818 && TREE_CODE (range[0]) == INTEGER_CST
4819 && ((tree_fits_uhwi_p (dstsize)
4820 && tree_int_cst_lt (dstsize, range[0]))
4821 || (dstwrite
4822 && tree_fits_uhwi_p (dstwrite)
4823 && tree_int_cst_lt (dstwrite, range[0]))))
4824 {
4825 const opt_code opt = OPT_Wstringop_overflow_;
4826 if (warning_suppressed_p (exp, opt)
4827 || (pad && pad->dst.ref
4828 && warning_suppressed_p (pad->dst.ref, opt)))
4829 return false;
4830
4831 location_t loc = EXPR_LOCATION (exp);
4832 bool warned = false;
4833 if (dstwrite == slen && at_least_one)
4834 {
4835 /* This is a call to strcpy with a destination of 0 size
4836 and a source of unknown length. The call will write
4837 at least one byte past the end of the destination. */
4838 warned = (func
4839 ? warning_at (loc, opt,
4840 "%qD writing %E or more bytes into "
4841 "a region of size %E overflows "
4842 "the destination",
4843 func, range[0], dstsize)
4844 : warning_at (loc, opt,
4845 "writing %E or more bytes into "
4846 "a region of size %E overflows "
4847 "the destination",
4848 range[0], dstsize));
4849 }
4850 else
4851 {
4852 const bool read
4853 = mode == access_read_only || mode == access_read_write;
4854 const bool write
4855 = mode == access_write_only || mode == access_read_write;
4856 const bool maybe = pad && pad->dst.parmarray;
4857 warned = warn_for_access (loc, func, exp,
4858 OPT_Wstringop_overflow_,
4859 range, dstsize,
4860 write, read && !builtin, maybe);
4861 }
4862
4863 if (warned)
4864 {
4865 suppress_warning (exp, OPT_Wstringop_overflow_);
4866 if (pad)
4867 pad->dst.inform_access (pad->mode);
4868 }
4869
4870 /* Return error when an overflow has been detected. */
4871 return false;
4872 }
4873 }
4874
4875 /* Check the maximum length of the source sequence against the size
4876 of the destination object if known, or against the maximum size
4877 of an object. */
4878 if (maxread)
4879 {
4880 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4881 PAD is nonnull and BNDRNG is valid. */
4882 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4883
4884 location_t loc = EXPR_LOCATION (exp);
4885 tree size = dstsize;
4886 if (pad && pad->mode == access_read_only)
4887 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4888
4889 if (range[0] && maxread && tree_fits_uhwi_p (size))
4890 {
4891 if (tree_int_cst_lt (maxobjsize, range[0]))
4892 {
4893 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4894 range, size, pad);
4895 return false;
4896 }
4897
4898 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4899 {
4900 opt_code opt = (dstwrite || mode != access_read_only
4901 ? OPT_Wstringop_overflow_
4902 : OPT_Wstringop_overread);
4903 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4904 return false;
4905 }
4906 }
4907
4908 maybe_warn_nonstring_arg (func, exp);
4909 }
4910
4911 /* Check for reading past the end of SRC. */
4912 bool overread = (slen
4913 && slen == srcstr
4914 && dstwrite
4915 && range[0]
4916 && TREE_CODE (slen) == INTEGER_CST
4917 && tree_int_cst_lt (slen, range[0]));
4918 /* If none is determined try to get a better answer based on the details
4919 in PAD. */
4920 if (!overread
4921 && pad
4922 && pad->src.sizrng[1] >= 0
4923 && pad->src.offrng[0] >= 0
4924 && (pad->src.offrng[1] < 0
4925 || pad->src.offrng[0] <= pad->src.offrng[1]))
4926 {
4927 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4928 PAD is nonnull and BNDRNG is valid. */
4929 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4930 /* Set OVERREAD for reads starting just past the end of an object. */
4931 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4932 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4933 slen = size_zero_node;
4934 }
4935
4936 if (overread)
4937 {
4938 const opt_code opt = OPT_Wstringop_overread;
4939 if (warning_suppressed_p (exp, opt)
4940 || (srcstr && warning_suppressed_p (srcstr, opt))
4941 || (pad && pad->src.ref
4942 && warning_suppressed_p (pad->src.ref, opt)))
4943 return false;
4944
4945 location_t loc = EXPR_LOCATION (exp);
4946 const bool read
4947 = mode == access_read_only || mode == access_read_write;
4948 const bool maybe = pad && pad->dst.parmarray;
4949 if (warn_for_access (loc, func, exp, opt, range, slen, false, read,
4950 maybe))
4951 {
4952 suppress_warning (exp, opt);
4953 if (pad)
4954 pad->src.inform_access (access_read_only);
4955 }
4956 return false;
4957 }
4958
4959 return true;
4960 }
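/* Illustrative example, not part of the original source: for

     char d[2];
     __builtin_strcpy (d, "abc");

   the source gives a length range of [3, 3], one byte is added for the
   terminating nul, and the comparison against DSTSIZE == 2 above leads to
   a -Wstringop-overflow diagnostic from warn_for_access (roughly "writing
   4 bytes into a region of size 2 overflows the destination").  */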
4961
4962 /* A convenience wrapper for check_access above to check access
4963 by a read-only function like puts. */
4964
4965 static bool
4966 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4967 int ost /* = 1 */)
4968 {
4969 if (!warn_stringop_overread)
4970 return true;
4971
4972 if (bound && !useless_type_conversion_p (size_type_node, TREE_TYPE (bound)))
4973 bound = fold_convert (size_type_node, bound);
4974 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4975 compute_objsize (src, ost, &data.src);
4976 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4977 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4978 &data);
4979 }
4980
4981 /* If STMT is a call to an allocation function, returns the constant
4982 maximum size of the object allocated by the call represented as
4983 sizetype. If nonnull, sets RNG1[] to the range of the size.
4984 When nonnull, uses RVALS for range information, otherwise gets global
4985 range info.
4986 Returns null when STMT is not a call to a valid allocation function. */
4987
4988 tree
4989 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4990 range_query * /* = NULL */)
4991 {
4992 if (!stmt || !is_gimple_call (stmt))
4993 return NULL_TREE;
4994
4995 tree allocfntype;
4996 if (tree fndecl = gimple_call_fndecl (stmt))
4997 allocfntype = TREE_TYPE (fndecl);
4998 else
4999 allocfntype = gimple_call_fntype (stmt);
5000
5001 if (!allocfntype)
5002 return NULL_TREE;
5003
5004 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
5005 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
5006 if (!at)
5007 {
5008 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
5009 return NULL_TREE;
5010
5011 argidx1 = 0;
5012 }
5013
5014 unsigned nargs = gimple_call_num_args (stmt);
5015
5016 if (argidx1 == UINT_MAX)
5017 {
5018 tree atval = TREE_VALUE (at);
5019 if (!atval)
5020 return NULL_TREE;
5021
5022 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
5023 if (nargs <= argidx1)
5024 return NULL_TREE;
5025
5026 atval = TREE_CHAIN (atval);
5027 if (atval)
5028 {
5029 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
5030 if (nargs <= argidx2)
5031 return NULL_TREE;
5032 }
5033 }
5034
5035 tree size = gimple_call_arg (stmt, argidx1);
5036
5037 wide_int rng1_buf[2];
5038 /* If RNG1 is not set, use the buffer. */
5039 if (!rng1)
5040 rng1 = rng1_buf;
5041
5042 /* Use maximum precision to avoid overflow below. */
5043 const int prec = ADDR_MAX_PRECISION;
5044
5045 {
5046 tree r[2];
5047 /* Determine the largest valid range size, including zero. */
5048 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5049 return NULL_TREE;
5050 rng1[0] = wi::to_wide (r[0], prec);
5051 rng1[1] = wi::to_wide (r[1], prec);
5052 }
5053
5054 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
5055 return fold_convert (sizetype, size);
5056
5057 /* To handle ranges do the math in wide_int and return the product
5058 of the upper bounds as a constant. Ignore anti-ranges. */
5059 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
5060 wide_int rng2[2];
5061 {
5062 tree r[2];
5063 /* As above, bail out if the range cannot be determined. */
5064 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5065 return NULL_TREE;
5066 rng2[0] = wi::to_wide (r[0], prec);
5067 rng2[1] = wi::to_wide (r[1], prec);
5068 }
5069
5070 /* Compute products of both bounds for the caller but return the lesser
5071 of SIZE_MAX and the product of the upper bounds as a constant. */
5072 rng1[0] = rng1[0] * rng2[0];
5073 rng1[1] = rng1[1] * rng2[1];
5074
5075 const tree size_max = TYPE_MAX_VALUE (sizetype);
5076 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5077 {
5078 rng1[1] = wi::to_wide (size_max, prec);
5079 return size_max;
5080 }
5081
5082 return wide_int_to_tree (sizetype, rng1[1]);
5083 }
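/* Illustrative example, not part of the original source: for a hypothetical
   allocator declared

     void *my_alloc (size_t, size_t)
       __attribute__ ((alloc_size (1, 2)));

   a call my_alloc (n, 8) where N is known to be in [1, 16] makes the
   function above return the constant 128 (the product of the upper bounds)
   and, when RNG1 is nonnull, set RNG1[] to [8, 128].  */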
5084
5085 /* For an access to an object referenced by the function parameter PTR
5086 of pointer type, set RNG[] to the range of sizes of the object
5087 obtained from the attribute access specification for the current function.
5088 Set STATIC_ARRAY if the array parameter has been declared [static].
5089 Return the function parameter on success and null otherwise. */
5090
5091 tree
5092 gimple_parm_array_size (tree ptr, wide_int rng[2],
5093 bool *static_array /* = NULL */)
5094 {
5095 /* For a function argument try to determine the byte size of the array
5096 from the current function declaration (e.g., attribute access or
5097 related). */
5098 tree var = SSA_NAME_VAR (ptr);
5099 if (TREE_CODE (var) != PARM_DECL)
5100 return NULL_TREE;
5101
5102 const unsigned prec = TYPE_PRECISION (sizetype);
5103
5104 rdwr_map rdwr_idx;
5105 attr_access *access = get_parm_access (rdwr_idx, var);
5106 if (!access)
5107 return NULL_TREE;
5108
5109 if (access->sizarg != UINT_MAX)
5110 {
5111 /* TODO: Try to extract the range from the argument based on
5112 those of subsequent assertions or based on known calls to
5113 the current function. */
5114 return NULL_TREE;
5115 }
5116
5117 if (!access->minsize)
5118 return NULL_TREE;
5119
5120 /* Only consider ordinary array bound at level 2 (or above if it's
5121 ever added). */
5122 if (warn_array_parameter < 2 && !access->static_p)
5123 return NULL_TREE;
5124
5125 if (static_array)
5126 *static_array = access->static_p;
5127
5128 rng[0] = wi::zero (prec);
5129 rng[1] = wi::uhwi (access->minsize, prec);
5130 /* Multiply the array bound encoded in the attribute by the size
5131 of what the pointer argument to which it decays points to. */
5132 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5133 tree size = TYPE_SIZE_UNIT (eltype);
5134 if (!size || TREE_CODE (size) != INTEGER_CST)
5135 return NULL_TREE;
5136
5137 rng[1] *= wi::to_wide (size, prec);
5138 return var;
5139 }
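/* Illustrative example, not part of the original source: for a parameter
   declared

     void f (int a[static 4]);

   the access attribute synthesized by the front end records a minimum
   bound of 4 elements, so the function above returns the PARM_DECL for A
   with RNG set to [0, 4 * sizeof (int)] bytes.  */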
5140
5141 /* Wrapper around the wide_int overload of get_range that accepts
5142 offset_int instead. For middle end expressions returns the same
5143 result. For a subset of nonconstant expressions emitted by the front
5144 end determines a more precise range than would be possible otherwise. */
5145
5146 static bool
5147 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5148 {
5149 offset_int add = 0;
5150 if (TREE_CODE (x) == PLUS_EXPR)
5151 {
5152 /* Handle constant offsets in pointer addition expressions seen
5153 in the front end IL. */
5154 tree op = TREE_OPERAND (x, 1);
5155 if (TREE_CODE (op) == INTEGER_CST)
5156 {
5157 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5158 add = wi::to_offset (op);
5159 x = TREE_OPERAND (x, 0);
5160 }
5161 }
5162
5163 if (TREE_CODE (x) == NOP_EXPR)
5164 /* Also handle conversions to sizetype seen in the front end IL. */
5165 x = TREE_OPERAND (x, 0);
5166
5167 tree type = TREE_TYPE (x);
5168 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5169 return false;
5170
5171 if (TREE_CODE (x) != INTEGER_CST
5172 && TREE_CODE (x) != SSA_NAME)
5173 {
5174 if (TYPE_UNSIGNED (type)
5175 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5176 type = signed_type_for (type);
5177
5178 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5179 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5180 return x;
5181 }
5182
5183 wide_int wr[2];
5184 if (!get_range (x, stmt, wr, rvals))
5185 return false;
5186
5187 signop sgn = SIGNED;
5188 /* Only convert signed integers or unsigned sizetype to a signed
5189 offset and avoid converting large positive values in narrower
5190 types to negative offsets. */
5191 if (TYPE_UNSIGNED (type)
5192 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5193 sgn = UNSIGNED;
5194
5195 r[0] = offset_int::from (wr[0], sgn);
5196 r[1] = offset_int::from (wr[1], sgn);
5197 return true;
5198 }
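/* Editorial note (illustration only): for a front-end expression such as
   `x + 4` where X is neither an INTEGER_CST nor an SSA_NAME, the function
   above peels the constant into ADD, falls back to the range of X's type,
   and yields [TYPE_MIN + 4, TYPE_MAX + 4]; for an SSA_NAME it instead
   queries get_range and converts the result to offset_int.  */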
5199
5200 /* Return the argument that the call STMT to a built-in function returns
5201 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5202 from the argument reflected in the value returned by the built-in if it
5203 can be determined, otherwise to 0 and HWI_M1U respectively. */
5204
5205 static tree
5206 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5207 range_query *rvals)
5208 {
5209 {
5210 /* Check for attribute fn spec to see if the function returns one
5211 of its arguments. */
5212 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
5213 unsigned int argno;
5214 if (fnspec.returns_arg (&argno))
5215 {
5216 offrng[0] = offrng[1] = 0;
5217 return gimple_call_arg (stmt, argno);
5218 }
5219 }
5220
5221 if (gimple_call_num_args (stmt) < 1)
5222 return NULL_TREE;
5223
5224 tree fn = gimple_call_fndecl (stmt);
5225 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5226 {
5227 /* See if this is a call to placement new. */
5228 if (!fn
5229 || !DECL_IS_OPERATOR_NEW_P (fn)
5230 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
5231 return NULL_TREE;
5232
5233 /* Check the mangling, keeping in mind that operator new takes
5234 a size_t which could be unsigned int or unsigned long. */
5235 tree fname = DECL_ASSEMBLER_NAME (fn);
5236 if (!id_equal (fname, "_ZnwjPv") // ordinary form
5237 && !id_equal (fname, "_ZnwmPv") // ordinary form
5238 && !id_equal (fname, "_ZnajPv") // array form
5239 && !id_equal (fname, "_ZnamPv")) // array form
5240 return NULL_TREE;
5241
5242 if (gimple_call_num_args (stmt) != 2)
5243 return NULL_TREE;
5244
5245 offrng[0] = offrng[1] = 0;
5246 return gimple_call_arg (stmt, 1);
5247 }
5248
5249 switch (DECL_FUNCTION_CODE (fn))
5250 {
5251 case BUILT_IN_MEMCPY:
5252 case BUILT_IN_MEMCPY_CHK:
5253 case BUILT_IN_MEMMOVE:
5254 case BUILT_IN_MEMMOVE_CHK:
5255 case BUILT_IN_MEMSET:
5256 case BUILT_IN_STPCPY:
5257 case BUILT_IN_STPCPY_CHK:
5258 case BUILT_IN_STPNCPY:
5259 case BUILT_IN_STPNCPY_CHK:
5260 case BUILT_IN_STRCAT:
5261 case BUILT_IN_STRCAT_CHK:
5262 case BUILT_IN_STRCPY:
5263 case BUILT_IN_STRCPY_CHK:
5264 case BUILT_IN_STRNCAT:
5265 case BUILT_IN_STRNCAT_CHK:
5266 case BUILT_IN_STRNCPY:
5267 case BUILT_IN_STRNCPY_CHK:
5268 offrng[0] = offrng[1] = 0;
5269 return gimple_call_arg (stmt, 0);
5270
5271 case BUILT_IN_MEMPCPY:
5272 case BUILT_IN_MEMPCPY_CHK:
5273 {
5274 tree off = gimple_call_arg (stmt, 2);
5275 if (!get_offset_range (off, stmt, offrng, rvals))
5276 {
5277 offrng[0] = 0;
5278 offrng[1] = HOST_WIDE_INT_M1U;
5279 }
5280 return gimple_call_arg (stmt, 0);
5281 }
5282
5283 case BUILT_IN_MEMCHR:
5284 {
5285 tree off = gimple_call_arg (stmt, 2);
5286 if (get_offset_range (off, stmt, offrng, rvals))
5287 offrng[0] = 0;
5288 else
5289 {
5290 offrng[0] = 0;
5291 offrng[1] = HOST_WIDE_INT_M1U;
5292 }
5293 return gimple_call_arg (stmt, 0);
5294 }
5295
5296 case BUILT_IN_STRCHR:
5297 case BUILT_IN_STRRCHR:
5298 case BUILT_IN_STRSTR:
5299 {
5300 offrng[0] = 0;
5301 offrng[1] = HOST_WIDE_INT_M1U;
5302 }
5303 return gimple_call_arg (stmt, 0);
5304
5305 default:
5306 break;
5307 }
5308
5309 return NULL_TREE;
5310 }
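/* Editorial illustration of the contract above, using hypothetical calls:
   for `q = memchr (buf, c, n)` the function returns BUF and sets OFFRNG
   to [0, upper bound of N], since memchr may return a pointer anywhere
   within the first N bytes of BUF; for `q = strcpy (d, s)` it returns D
   with a zero offset range.  */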
5311
5312 /* A helper of compute_objsize_r() to determine the size from an assignment
5313 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5314
5315 static bool
5316 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5317 ssa_name_limit_t &snlim, pointer_query *qry)
5318 {
5319 tree_code code = gimple_assign_rhs_code (stmt);
5320
5321 tree ptr = gimple_assign_rhs1 (stmt);
5322
5323 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5324 Determine the size/offset of each and use the one with more or less
5325 space remaining, respectively. If either fails, use the information
5326 determined from the other instead, adjusted up or down as appropriate
5327 for the expression. */
5328 access_ref aref[2] = { *pref, *pref };
5329 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5330 {
5331 aref[0].base0 = false;
5332 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5333 aref[0].add_max_offset ();
5334 aref[0].set_max_size_range ();
5335 }
5336
5337 ptr = gimple_assign_rhs2 (stmt);
5338 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5339 {
5340 aref[1].base0 = false;
5341 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5342 aref[1].add_max_offset ();
5343 aref[1].set_max_size_range ();
5344 }
5345
5346 if (!aref[0].ref && !aref[1].ref)
5347 /* Fail if the identity of neither argument could be determined. */
5348 return false;
5349
5350 bool i0 = false;
5351 if (aref[0].ref && aref[0].base0)
5352 {
5353 if (aref[1].ref && aref[1].base0)
5354 {
5355 /* If the object referenced by both arguments has been determined,
5356 set *PREF to the one with more or less space remaining, whichever
5357 is appropriate for CODE.
5358 TODO: Indicate when the objects are distinct so it can be
5359 diagnosed. */
5360 i0 = code == MAX_EXPR;
5361 const bool i1 = !i0;
5362
5363 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5364 *pref = aref[i1];
5365 else
5366 *pref = aref[i0];
5367 return true;
5368 }
5369
5370 /* If only the object referenced by one of the arguments could be
5371 determined, use it and... */
5372 *pref = aref[0];
5373 i0 = true;
5374 }
5375 else
5376 *pref = aref[1];
5377
5378 const bool i1 = !i0;
5379 /* ...see if the offset obtained from the other pointer can be used
5380 to tighten up the bound on the offset obtained from the first. */
5381 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5382 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5383 {
5384 pref->offrng[0] = aref[i0].offrng[0];
5385 pref->offrng[1] = aref[i0].offrng[1];
5386 }
5387 return true;
5388 }
5389
5390 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
5391 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
5392 on success and false on failure. */
5393
5394 static bool
5395 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
5396 ssa_name_limit_t &snlim, pointer_query *qry)
5397 {
5398 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
5399
5400 ++pref->deref;
5401
5402 tree arefop = TREE_OPERAND (aref, 0);
5403 tree reftype = TREE_TYPE (arefop);
5404 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5405 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
5406 of known bound. */
5407 return false;
5408
5409 if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
5410 return false;
5411
5412 offset_int orng[2];
5413 tree off = pref->eval (TREE_OPERAND (aref, 1));
5414 range_query *const rvals = qry ? qry->rvals : NULL;
5415 if (!get_offset_range (off, NULL, orng, rvals))
5416 {
5417 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5418 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5419 orng[0] = -orng[1] - 1;
5420 }
5421
5422 /* Convert the array index range determined above to a byte
5423 offset. */
5424 tree lowbnd = array_ref_low_bound (aref);
5425 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5426 {
5427 /* Adjust the index by the low bound of the array domain
5428 (normally zero but 1 in Fortran). */
5429 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5430 orng[0] -= lb;
5431 orng[1] -= lb;
5432 }
5433
5434 tree eltype = TREE_TYPE (aref);
5435 tree tpsize = TYPE_SIZE_UNIT (eltype);
5436 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5437 {
5438 pref->add_max_offset ();
5439 return true;
5440 }
5441
5442 offset_int sz = wi::to_offset (tpsize);
5443 orng[0] *= sz;
5444 orng[1] *= sz;
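   /* Editorial example (assumed types, not part of the original file):
      for `int a[8]` and an index range of [2, 5], ORNG becomes [8, 20]
      with a 4-byte int.  */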
5445
5446 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5447 {
5448 /* Except for the permissive raw memory functions which use
5449 the size of the whole object determined above, use the size
5450 of the referenced array. Because the overall offset is from
5451 the beginning of the complete array object, add this overall
5452 offset to the size of the array. */
5453 offset_int sizrng[2] =
5454 {
5455 pref->offrng[0] + orng[0] + sz,
5456 pref->offrng[1] + orng[1] + sz
5457 };
5458 if (sizrng[1] < sizrng[0])
5459 std::swap (sizrng[0], sizrng[1]);
5460 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5461 pref->sizrng[0] = sizrng[0];
5462 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5463 pref->sizrng[1] = sizrng[1];
5464 }
5465
5466 pref->add_offset (orng[0], orng[1]);
5467 return true;
5468 }
5469
5470 /* A helper of compute_objsize_r() to determine the size from MEM_REF
5471 MREF. Return true on success and false on failure. */
5472
5473 static bool
5474 handle_mem_ref (tree mref, int ostype, access_ref *pref,
5475 ssa_name_limit_t &snlim, pointer_query *qry)
5476 {
5477 gcc_assert (TREE_CODE (mref) == MEM_REF);
5478
5479 ++pref->deref;
5480
5481 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
5482 {
5483 /* Hack: Handle MEM_REFs of vector types as those to complete
5484 objects; those may be synthesized from multiple assignments
5485 to consecutive data members (see PR 93200 and 96963).
5486 FIXME: Vectorized assignments should only be present after
5487 vectorization so this hack is only necessary after it has
5488 run and could be avoided in calls from prior passes (e.g.,
5489 tree-ssa-strlen.c).
5490 FIXME: Deal with this more generally, e.g., by marking up
5491 such MEM_REFs at the time they're created. */
5492 ostype = 0;
5493 }
5494
5495 tree mrefop = TREE_OPERAND (mref, 0);
5496 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
5497 return false;
5498
5499 offset_int orng[2];
5500 tree off = pref->eval (TREE_OPERAND (mref, 1));
5501 range_query *const rvals = qry ? qry->rvals : NULL;
5502 if (!get_offset_range (off, NULL, orng, rvals))
5503 {
5504 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5505 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5506 orng[0] = -orng[1] - 1;
5507 }
5508
5509 pref->add_offset (orng[0], orng[1]);
5510 return true;
5511 }
5512
5513 /* Helper to compute the size of the object referenced by the PTR
5514 expression which must have pointer type, using Object Size type
5515 OSTYPE (only the least significant 2 bits are used).
5516 On success, sets PREF->REF to the DECL of the referenced object
5517 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5518 offsets into it, and PREF->SIZRNG to the range of sizes of
5519 the object(s).
5520 SNLIM is used to avoid visiting the same PHI operand multiple
5521 times, and, when nonnull, RVALS to determine range information.
5522 Returns true on success, false when a meaningful size (or range)
5523 cannot be determined.
5524
5525 The function is intended for diagnostics and should not be used
5526 to influence code generation or optimization. */
5527
5528 static bool
5529 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5530 ssa_name_limit_t &snlim, pointer_query *qry)
5531 {
5532 STRIP_NOPS (ptr);
5533
5534 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5535 if (addr)
5536 {
5537 --pref->deref;
5538 ptr = TREE_OPERAND (ptr, 0);
5539 }
5540
5541 if (DECL_P (ptr))
5542 {
5543 pref->ref = ptr;
5544
5545 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5546 {
5547 /* Set the maximum size if the reference is to the pointer
5548 itself (as opposed to what it points to), and clear
5549 BASE0 since the offset isn't necessarily zero-based. */
5550 pref->set_max_size_range ();
5551 pref->base0 = false;
5552 return true;
5553 }
5554
5555 if (tree size = decl_init_size (ptr, false))
5556 if (TREE_CODE (size) == INTEGER_CST)
5557 {
5558 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5559 return true;
5560 }
5561
5562 pref->set_max_size_range ();
5563 return true;
5564 }
5565
5566 const tree_code code = TREE_CODE (ptr);
5567 range_query *const rvals = qry ? qry->rvals : NULL;
5568
5569 if (code == BIT_FIELD_REF)
5570 {
5571 tree ref = TREE_OPERAND (ptr, 0);
5572 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5573 return false;
5574
5575 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5576 pref->add_offset (off / BITS_PER_UNIT);
5577 return true;
5578 }
5579
5580 if (code == COMPONENT_REF)
5581 {
5582 tree ref = TREE_OPERAND (ptr, 0);
5583 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5584 /* In accesses through union types consider the entire unions
5585 rather than just their members. */
5586 ostype = 0;
5587 tree field = TREE_OPERAND (ptr, 1);
5588
5589 if (ostype == 0)
5590 {
5591 /* In OSTYPE zero (for raw memory functions like memcpy), use
5592 the maximum size instead if the identity of the enclosing
5593 object cannot be determined. */
5594 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5595 return false;
5596
5597 /* Otherwise, use the size of the enclosing object and add
5598 the offset of the member to the offset computed so far. */
5599 tree offset = byte_position (field);
5600 if (TREE_CODE (offset) == INTEGER_CST)
5601 pref->add_offset (wi::to_offset (offset));
5602 else
5603 pref->add_max_offset ();
5604
5605 if (!pref->ref)
5606 /* REF may have been already set to an SSA_NAME earlier
5607 to provide better context for diagnostics. In that case,
5608 leave it unchanged. */
5609 pref->ref = ref;
5610 return true;
5611 }
5612
5613 pref->ref = field;
5614
5615 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5616 {
5617 /* Set maximum size if the reference is to the pointer member
5618 itself (as opposed to what it points to). */
5619 pref->set_max_size_range ();
5620 return true;
5621 }
5622
5623 /* SAM is set for array members that might need special treatment. */
5624 special_array_member sam;
5625 tree size = component_ref_size (ptr, &sam);
5626 if (sam == special_array_member::int_0)
5627 pref->sizrng[0] = pref->sizrng[1] = 0;
5628 else if (!pref->trail1special && sam == special_array_member::trail_1)
5629 pref->sizrng[0] = pref->sizrng[1] = 1;
5630 else if (size && TREE_CODE (size) == INTEGER_CST)
5631 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5632 else
5633 {
5634 /* When the size of the member is unknown it's either a flexible
5635 array member or a trailing special array member (either zero
5636 length or one-element). Set the size to the maximum minus
5637 the constant size of the type. */
5638 pref->sizrng[0] = 0;
5639 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5640 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5641 if (TREE_CODE (recsize) == INTEGER_CST)
5642 pref->sizrng[1] -= wi::to_offset (recsize);
5643 }
5644 return true;
5645 }
5646
5647 if (code == ARRAY_REF)
5648 return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
5649
5650 if (code == MEM_REF)
5651 return handle_mem_ref (ptr, ostype, pref, snlim, qry);
5652
5653 if (code == TARGET_MEM_REF)
5654 {
5655 tree ref = TREE_OPERAND (ptr, 0);
5656 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5657 return false;
5658
5659 /* TODO: Handle remaining operands. Until then, add maximum offset. */
5660 pref->ref = ptr;
5661 pref->add_max_offset ();
5662 return true;
5663 }
5664
5665 if (code == INTEGER_CST)
5666 {
5667 /* Pointer constants other than null are most likely the result
5668 of erroneous null pointer addition/subtraction. Set size to
5669 zero. For null pointers, set size to the maximum for now
5670 since those may be the result of jump threading. */
5671 if (integer_zerop (ptr))
5672 pref->set_max_size_range ();
5673 else
5674 pref->sizrng[0] = pref->sizrng[1] = 0;
5675 pref->ref = ptr;
5676
5677 return true;
5678 }
5679
5680 if (code == STRING_CST)
5681 {
5682 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5683 pref->ref = ptr;
5684 return true;
5685 }
5686
5687 if (code == POINTER_PLUS_EXPR)
5688 {
5689 tree ref = TREE_OPERAND (ptr, 0);
5690 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5691 return false;
5692
5693 /* Clear DEREF since the offset is being applied to the target
5694 of the dereference. */
5695 pref->deref = 0;
5696
5697 offset_int orng[2];
5698 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5699 if (get_offset_range (off, NULL, orng, rvals))
5700 pref->add_offset (orng[0], orng[1]);
5701 else
5702 pref->add_max_offset ();
5703 return true;
5704 }
5705
5706 if (code == VIEW_CONVERT_EXPR)
5707 {
5708 ptr = TREE_OPERAND (ptr, 0);
5709 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5710 }
5711
5712 if (code == SSA_NAME)
5713 {
5714 if (!snlim.next ())
5715 return false;
5716
5717 /* Only process an SSA_NAME if the recursion limit has not yet
5718 been reached. */
5719 if (qry)
5720 {
5721 if (++qry->depth)
5722 qry->max_depth = qry->depth;
5723 if (const access_ref *cache_ref = qry->get_ref (ptr))
5724 {
5725 /* If the pointer is in the cache set *PREF to what it refers
5726 to and return success. */
5727 *pref = *cache_ref;
5728 return true;
5729 }
5730 }
5731
5732 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5733 if (is_gimple_call (stmt))
5734 {
5735 /* If STMT is a call to an allocation function get the size
5736 from its argument(s). If successful, also set *PREF->REF
5737 to PTR for the caller to include in diagnostics. */
5738 wide_int wr[2];
5739 if (gimple_call_alloc_size (stmt, wr, rvals))
5740 {
5741 pref->ref = ptr;
5742 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5743 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5744 /* Constrain both bounds to a valid size. */
5745 offset_int maxsize = wi::to_offset (max_object_size ());
5746 if (pref->sizrng[0] > maxsize)
5747 pref->sizrng[0] = maxsize;
5748 if (pref->sizrng[1] > maxsize)
5749 pref->sizrng[1] = maxsize;
5750 }
5751 else
5752 {
5753 /* For functions known to return one of their pointer arguments
5754 try to determine what the returned pointer points to, and on
5755 success add OFFRNG which was set to the offset added by
5756 the function (e.g., memchr) to the overall offset. */
5757 offset_int offrng[2];
5758 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5759 {
5760 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5761 return false;
5762
5763 /* Cap OFFRNG[1] to at most the remaining size of
5764 the object. */
5765 offset_int remrng[2];
5766 remrng[1] = pref->size_remaining (remrng);
5767 if (remrng[1] < offrng[1])
5768 offrng[1] = remrng[1];
5769 pref->add_offset (offrng[0], offrng[1]);
5770 }
5771 else
5772 {
5773 /* For other calls that might return arbitrary pointers
5774 including into the middle of objects set the size
5775 range to maximum, clear PREF->BASE0, and also set
5776 PREF->REF to include in diagnostics. */
5777 pref->set_max_size_range ();
5778 pref->base0 = false;
5779 pref->ref = ptr;
5780 }
5781 }
5782 qry->put_ref (ptr, *pref);
5783 return true;
5784 }
5785
5786 if (gimple_nop_p (stmt))
5787 {
5788 /* For a function argument try to determine the byte size
5789 of the array from the current function declaration
5790 (e.g., attribute access or related). */
5791 wide_int wr[2];
5792 bool static_array = false;
5793 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5794 {
5795 pref->parmarray = !static_array;
5796 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5797 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5798 pref->ref = ref;
5799 qry->put_ref (ptr, *pref);
5800 return true;
5801 }
5802
5803 pref->set_max_size_range ();
5804 pref->base0 = false;
5805 pref->ref = ptr;
5806 qry->put_ref (ptr, *pref);
5807 return true;
5808 }
5809
5810 if (gimple_code (stmt) == GIMPLE_PHI)
5811 {
5812 pref->ref = ptr;
5813 access_ref phi_ref = *pref;
5814 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5815 return false;
5816 *pref = phi_ref;
5817 pref->ref = ptr;
5818 qry->put_ref (ptr, *pref);
5819 return true;
5820 }
5821
5822 if (!is_gimple_assign (stmt))
5823 {
5824 /* Clear BASE0 since the assigned pointer might point into
5825 the middle of the object, set the maximum size range and,
5826 if the SSA_NAME refers to a function argument, set
5827 PREF->REF to it. */
5828 pref->base0 = false;
5829 pref->set_max_size_range ();
5830 pref->ref = ptr;
5831 return true;
5832 }
5833
5834 tree_code code = gimple_assign_rhs_code (stmt);
5835
5836 if (code == MAX_EXPR || code == MIN_EXPR)
5837 {
5838 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5839 return false;
5840 qry->put_ref (ptr, *pref);
5841 return true;
5842 }
5843
5844 tree rhs = gimple_assign_rhs1 (stmt);
5845
5846 if (code == ASSERT_EXPR)
5847 {
5848 rhs = TREE_OPERAND (rhs, 0);
5849 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5850 }
5851
5852 if (code == POINTER_PLUS_EXPR
5853 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5854 {
5855 /* Compute the size of the object first. */
5856 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5857 return false;
5858
5859 offset_int orng[2];
5860 tree off = gimple_assign_rhs2 (stmt);
5861 if (get_offset_range (off, stmt, orng, rvals))
5862 pref->add_offset (orng[0], orng[1]);
5863 else
5864 pref->add_max_offset ();
5865 qry->put_ref (ptr, *pref);
5866 return true;
5867 }
5868
5869 if (code == ADDR_EXPR
5870 || code == SSA_NAME)
5871 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5872
5873 /* (This could also be an assignment from a nonlocal pointer.) Save
5874 PTR to mention in diagnostics but otherwise treat it as a pointer
5875 to an unknown object. */
5876 pref->ref = rhs;
5877 pref->base0 = false;
5878 pref->set_max_size_range ();
5879 return true;
5880 }
5881
5882 /* Assume all other expressions point into an unknown object
5883 of the maximum valid size. */
5884 pref->ref = ptr;
5885 pref->base0 = false;
5886 pref->set_max_size_range ();
5887 if (TREE_CODE (ptr) == SSA_NAME)
5888 qry->put_ref (ptr, *pref);
5889 return true;
5890 }
5891
5892 /* A "public" wrapper around the above. Clients should use this overload
5893 instead. */
5894
5895 tree
5896 compute_objsize (tree ptr, int ostype, access_ref *pref,
5897 range_query *rvals /* = NULL */)
5898 {
5899 pointer_query qry;
5900 qry.rvals = rvals;
5901 ssa_name_limit_t snlim;
5902 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5903 return NULL_TREE;
5904
5905 offset_int maxsize = pref->size_remaining ();
5906 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5907 pref->offrng[0] = 0;
5908 return wide_int_to_tree (sizetype, maxsize);
5909 }
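/* Editorial illustration, not part of the original file: a minimal sketch
   of how a diagnostic pass might use the wrapper above (names are
   hypothetical):

     access_ref aref;
     if (tree size = compute_objsize (ptr, 1, &aref))
       // SIZE is the maximum number of bytes remaining in the object
       // past the offset recorded in aref.offrng; aref.ref identifies
       // the object for use in diagnostics.
       ...;
*/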
5910
5911 /* Transitional wrapper. The function should be removed once callers
5912 transition to the pointer_query API. */
5913
5914 tree
5915 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5916 {
5917 pointer_query qry;
5918 if (ptr_qry)
5919 ptr_qry->depth = 0;
5920 else
5921 ptr_qry = &qry;
5922
5923 ssa_name_limit_t snlim;
5924 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5925 return NULL_TREE;
5926
5927 offset_int maxsize = pref->size_remaining ();
5928 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5929 pref->offrng[0] = 0;
5930 return wide_int_to_tree (sizetype, maxsize);
5931 }
5932
5933 /* Legacy wrapper around the above. The function should be removed
5934 once callers transition to one of the two above. */
5935
5936 tree
5937 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5938 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5939 {
5940 /* Set the initial offsets to zero and size to negative to indicate
5941 none has been computed yet. */
5942 access_ref ref;
5943 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5944 if (!size || !ref.base0)
5945 return NULL_TREE;
5946
5947 if (pdecl)
5948 *pdecl = ref.ref;
5949
5950 if (poff)
5951 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5952
5953 return size;
5954 }
5955
5956 /* Helper to determine and check the sizes of the source and the destination
5957 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5958 call expression, DEST is the destination argument, SRC is the source
5959 argument or null, and SIZE is the number of bytes. Use Object Size type-0
5960 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5961 (no overflow or invalid sizes), false otherwise. */
5962
5963 static bool
5964 check_memop_access (tree exp, tree dest, tree src, tree size)
5965 {
5966 /* For functions like memset and memcpy that operate on raw memory
5967 try to determine the size of the largest source and destination
5968 object using type-0 Object Size regardless of the object size
5969 type specified by the option. */
5970 access_data data (exp, access_read_write);
5971 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5972 tree dstsize = compute_objsize (dest, 0, &data.dst);
5973
5974 return check_access (exp, size, /*maxread=*/NULL_TREE,
5975 srcsize, dstsize, data.mode, &data);
5976 }
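/* Editorial note: for a hypothetical call `memcpy (d, s, n)` the helper
   above computes type-0 (whole object) sizes for both D and S and lets
   check_access diagnose an N that exceeds either one, using Object Size
   type 0 regardless of the -Wstringop-overflow= level in effect.  */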
5977
5978 /* Validate memchr arguments without performing any expansion.
5979 Return NULL_RTX. */
5980
5981 static rtx
5982 expand_builtin_memchr (tree exp, rtx)
5983 {
5984 if (!validate_arglist (exp,
5985 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5986 return NULL_RTX;
5987
5988 tree arg1 = CALL_EXPR_ARG (exp, 0);
5989 tree len = CALL_EXPR_ARG (exp, 2);
5990
5991 check_read_access (exp, arg1, len, 0);
5992
5993 return NULL_RTX;
5994 }
5995
5996 /* Expand a call EXP to the memcpy builtin.
5997 Return NULL_RTX if we failed; the caller should emit a normal call,
5998 otherwise try to get the result in TARGET, if convenient (and in
5999 mode MODE if that's convenient). */
6000
6001 static rtx
6002 expand_builtin_memcpy (tree exp, rtx target)
6003 {
6004 if (!validate_arglist (exp,
6005 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6006 return NULL_RTX;
6007
6008 tree dest = CALL_EXPR_ARG (exp, 0);
6009 tree src = CALL_EXPR_ARG (exp, 1);
6010 tree len = CALL_EXPR_ARG (exp, 2);
6011
6012 check_memop_access (exp, dest, src, len);
6013
6014 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
6015 /*retmode=*/ RETURN_BEGIN, false);
6016 }
6017
6018 /* Check a call EXP to the memmove built-in for validity.
6019 Return NULL_RTX on both success and failure. */
6020
6021 static rtx
6022 expand_builtin_memmove (tree exp, rtx target)
6023 {
6024 if (!validate_arglist (exp,
6025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6026 return NULL_RTX;
6027
6028 tree dest = CALL_EXPR_ARG (exp, 0);
6029 tree src = CALL_EXPR_ARG (exp, 1);
6030 tree len = CALL_EXPR_ARG (exp, 2);
6031
6032 check_memop_access (exp, dest, src, len);
6033
6034 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
6035 /*retmode=*/ RETURN_BEGIN, true);
6036 }
6037
6038 /* Expand a call EXP to the mempcpy builtin.
6039 Return NULL_RTX if we failed; the caller should emit a normal call,
6040 otherwise try to get the result in TARGET, if convenient (and in
6041 mode MODE if that's convenient). */
6042
6043 static rtx
6044 expand_builtin_mempcpy (tree exp, rtx target)
6045 {
6046 if (!validate_arglist (exp,
6047 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6048 return NULL_RTX;
6049
6050 tree dest = CALL_EXPR_ARG (exp, 0);
6051 tree src = CALL_EXPR_ARG (exp, 1);
6052 tree len = CALL_EXPR_ARG (exp, 2);
6053
6054 /* Policy does not generally allow using compute_objsize (which
6055 is used internally by check_memop_access) to change code generation
6056 or drive optimization decisions.
6057
6058 In this instance it is safe because the code we generate has
6059 the same semantics regardless of the return value of
6060 check_memop_access. Exactly the same amount of data is copied
6061 and the return value is exactly the same in both cases.
6062
6063 Furthermore, check_memop_access always uses mode 0 for the call to
6064 compute_objsize, so the imprecise nature of compute_objsize is
6065 avoided. */
6066
6067 /* Avoid expanding mempcpy into memcpy when the call is determined
6068 to overflow the buffer. This also prevents the same overflow
6069 from being diagnosed again when expanding memcpy. */
6070 if (!check_memop_access (exp, dest, src, len))
6071 return NULL_RTX;
6072
6073 return expand_builtin_mempcpy_args (dest, src, len,
6074 target, exp, /*retmode=*/ RETURN_END);
6075 }
6076
6077 /* Helper function to do the actual work of expanding the memory copy family
6078 functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
6079 of memory from SRC to DEST and assign to TARGET if convenient. Return
6080 value is based on RETMODE argument. */
6081
6082 static rtx
6083 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
6084 rtx target, tree exp, memop_ret retmode,
6085 bool might_overlap)
6086 {
6087 unsigned int src_align = get_pointer_alignment (src);
6088 unsigned int dest_align = get_pointer_alignment (dest);
6089 rtx dest_mem, src_mem, dest_addr, len_rtx;
6090 HOST_WIDE_INT expected_size = -1;
6091 unsigned int expected_align = 0;
6092 unsigned HOST_WIDE_INT min_size;
6093 unsigned HOST_WIDE_INT max_size;
6094 unsigned HOST_WIDE_INT probable_max_size;
6095
6096 bool is_move_done;
6097
6098 /* If DEST is not a pointer type, call the normal function. */
6099 if (dest_align == 0)
6100 return NULL_RTX;
6101
6102 /* Likewise, if SRC is not a pointer type, don't do this
6103 operation in-line. */
6104 if (src_align == 0)
6105 return NULL_RTX;
6106
6107 if (currently_expanding_gimple_stmt)
6108 stringop_block_profile (currently_expanding_gimple_stmt,
6109 &expected_align, &expected_size);
6110
6111 if (expected_align < dest_align)
6112 expected_align = dest_align;
6113 dest_mem = get_memory_rtx (dest, len);
6114 set_mem_align (dest_mem, dest_align);
6115 len_rtx = expand_normal (len);
6116 determine_block_size (len, len_rtx, &min_size, &max_size,
6117 &probable_max_size);
6118
6119 /* Try to get the byte representation of the constant SRC points to,
6120 with its byte size in NBYTES. */
6121 unsigned HOST_WIDE_INT nbytes;
6122 const char *rep = getbyterep (src, &nbytes);
6123
6124 /* If the function's constant bound LEN_RTX is less than or equal
6125 to the byte size of the representation of the constant argument,
6126 and if block move would be done by pieces, we can avoid loading
6127 the bytes from memory and only store the computed constant.
6128 This works in the overlap (memmove) case as well because
6129 store_by_pieces just generates a series of stores of constants
6130 from the representation returned by getbyterep(). */
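 /* Editorial example: for `memcpy (d, "abcd", 2)` the two leading bytes
    can be stored as an immediate constant without loading them from the
    string literal, assuming can_store_by_pieces accepts the size and
    alignment.  */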
6131 if (rep
6132 && CONST_INT_P (len_rtx)
6133 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
6134 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
6135 CONST_CAST (char *, rep),
6136 dest_align, false))
6137 {
6138 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
6139 builtin_memcpy_read_str,
6140 CONST_CAST (char *, rep),
6141 dest_align, false, retmode);
6142 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6143 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6144 return dest_mem;
6145 }
6146
6147 src_mem = get_memory_rtx (src, len);
6148 set_mem_align (src_mem, src_align);
6149
6150 /* Copy word part most expediently. */
6151 enum block_op_methods method = BLOCK_OP_NORMAL;
6152 if (CALL_EXPR_TAILCALL (exp)
6153 && (retmode == RETURN_BEGIN || target == const0_rtx))
6154 method = BLOCK_OP_TAILCALL;
6155 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6156 && retmode == RETURN_END
6157 && !might_overlap
6158 && target != const0_rtx);
6159 if (use_mempcpy_call)
6160 method = BLOCK_OP_NO_LIBCALL_RET;
6161 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6162 expected_align, expected_size,
6163 min_size, max_size, probable_max_size,
6164 use_mempcpy_call, &is_move_done,
6165 might_overlap);
6166
6167 /* Bail out when a mempcpy call would be expanded as a libcall and
6168 the target provides a fast implementation of the mempcpy
6169 routine. */
6170 if (!is_move_done)
6171 return NULL_RTX;
6172
6173 if (dest_addr == pc_rtx)
6174 return NULL_RTX;
6175
6176 if (dest_addr == 0)
6177 {
6178 dest_addr = force_operand (XEXP (dest_mem, 0), target);
6179 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6180 }
6181
6182 if (retmode != RETURN_BEGIN && target != const0_rtx)
6183 {
6184 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6185 /* stpcpy pointer to last byte. */
6186 if (retmode == RETURN_END_MINUS_ONE)
6187 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6188 }
6189
6190 return dest_addr;
6191 }
6192
6193 static rtx
6194 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
6195 rtx target, tree orig_exp, memop_ret retmode)
6196 {
6197 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
6198 retmode, false);
6199 }
6200
6201 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
6202 we failed; the caller should emit a normal call, otherwise try to
6203 get the result in TARGET, if convenient.
6204 Return value is based on RETMODE argument. */
6205
6206 static rtx
6207 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
6208 {
6209 class expand_operand ops[3];
6210 rtx dest_mem;
6211 rtx src_mem;
6212
6213 if (!targetm.have_movstr ())
6214 return NULL_RTX;
6215
6216 dest_mem = get_memory_rtx (dest, NULL);
6217 src_mem = get_memory_rtx (src, NULL);
6218 if (retmode == RETURN_BEGIN)
6219 {
6220 target = force_reg (Pmode, XEXP (dest_mem, 0));
6221 dest_mem = replace_equiv_address (dest_mem, target);
6222 }
6223
6224 create_output_operand (&ops[0],
6225 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
6226 create_fixed_operand (&ops[1], dest_mem);
6227 create_fixed_operand (&ops[2], src_mem);
6228 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
6229 return NULL_RTX;
6230
6231 if (retmode != RETURN_BEGIN && target != const0_rtx)
6232 {
6233 target = ops[0].value;
6234 /* movstr is supposed to set end to the address of the NUL
6235 terminator. If the caller requested a mempcpy-like return value,
6236 adjust it. */
6237 if (retmode == RETURN_END)
6238 {
6239 rtx tem = plus_constant (GET_MODE (target),
6240 gen_lowpart (GET_MODE (target), target), 1);
6241 emit_move_insn (target, force_operand (tem, NULL_RTX));
6242 }
6243 }
6244 return target;
6245 }
6246
6247 /* Do some very basic size validation of a call to the strcat builtin
6248 given by EXP. Return NULL_RTX to have the built-in expand to a call
6249 to the library function. */
6250
6251 static rtx
6252 expand_builtin_strcat (tree exp)
6253 {
6254 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6255 || !warn_stringop_overflow)
6256 return NULL_RTX;
6257
6258 tree dest = CALL_EXPR_ARG (exp, 0);
6259 tree src = CALL_EXPR_ARG (exp, 1);
6260
6261 /* There is no way here to determine the length of the string in
6262 the destination to which the SRC string is being appended, so
6263 just diagnose cases when the source string is longer than
6264 the destination object. */
6265 access_data data (exp, access_read_write, NULL_TREE, true,
6266 NULL_TREE, true);
6267 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6268 compute_objsize (src, ost, &data.src);
6269 tree destsize = compute_objsize (dest, ost, &data.dst);
6270
6271 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6272 src, destsize, data.mode, &data);
6273
6274 return NULL_RTX;
6275 }
6276
6277 /* Expand expression EXP, which is a call to the strcpy builtin. Return
6278 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6279 try to get the result in TARGET, if convenient (and in mode MODE if that's
6280 convenient). */
6281
6282 static rtx
6283 expand_builtin_strcpy (tree exp, rtx target)
6284 {
6285 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6286 return NULL_RTX;
6287
6288 tree dest = CALL_EXPR_ARG (exp, 0);
6289 tree src = CALL_EXPR_ARG (exp, 1);
6290
6291 if (warn_stringop_overflow)
6292 {
6293 access_data data (exp, access_read_write, NULL_TREE, true,
6294 NULL_TREE, true);
6295 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6296 compute_objsize (src, ost, &data.src);
6297 tree dstsize = compute_objsize (dest, ost, &data.dst);
6298 check_access (exp, /*dstwrite=*/ NULL_TREE,
6299 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6300 dstsize, data.mode, &data);
6301 }
6302
6303 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
6304 {
6305 /* Check to see if the argument was declared attribute nonstring
6306 and if so, issue a warning since at this point it's not known
6307 to be nul-terminated. */
6308 tree fndecl = get_callee_fndecl (exp);
6309 maybe_warn_nonstring_arg (fndecl, exp);
6310 return ret;
6311 }
6312
6313 return NULL_RTX;
6314 }
6315
6316 /* Helper function to do the actual work for expand_builtin_strcpy. The
6317 arguments to the builtin_strcpy call DEST and SRC are broken out
6318 so that this can also be called without constructing an actual CALL_EXPR.
6319 The other arguments and return value are the same as for
6320 expand_builtin_strcpy. */
6321
6322 static rtx
6323 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
6324 {
6325 /* Detect strcpy calls with unterminated arrays. */
6326 tree size;
6327 bool exact;
6328 if (tree nonstr = unterminated_array (src, &size, &exact))
6329 {
6330 /* NONSTR refers to the non-nul terminated constant array. */
6331 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6332 size, exact);
6333 return NULL_RTX;
6334 }
6335
6336 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
6337 }
6338
6339 /* Expand a call EXP to the stpcpy builtin.
6340 Return NULL_RTX if we failed; the caller should emit a normal call,
6341 otherwise try to get the result in TARGET, if convenient (and in
6342 mode MODE if that's convenient). */
6343
6344 static rtx
6345 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
6346 {
6347 tree dst, src;
6348 location_t loc = EXPR_LOCATION (exp);
6349
6350 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6351 return NULL_RTX;
6352
6353 dst = CALL_EXPR_ARG (exp, 0);
6354 src = CALL_EXPR_ARG (exp, 1);
6355
6356 if (warn_stringop_overflow)
6357 {
6358 access_data data (exp, access_read_write);
6359 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6360 &data.dst);
6361 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6362 src, destsize, data.mode, &data);
6363 }
6364
6365 /* If return value is ignored, transform stpcpy into strcpy. */
6366 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
6367 {
6368 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
6369 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
6370 return expand_expr (result, target, mode, EXPAND_NORMAL);
6371 }
6372 else
6373 {
6374 tree len, lenp1;
6375 rtx ret;
6376
6377 /* Ensure we get an actual string whose length can be evaluated at
6378 compile-time, not an expression containing a string. This is
6379 because the latter will potentially produce pessimized code
6380 when used to produce the return value. */
6381 c_strlen_data lendata = { };
6382 if (!c_getstr (src)
6383 || !(len = c_strlen (src, 0, &lendata, 1)))
6384 return expand_movstr (dst, src, target,
6385 /*retmode=*/ RETURN_END_MINUS_ONE);
6386
6387 if (lendata.decl)
6388 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
6389
6390 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
6391 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
6392 target, exp,
6393 /*retmode=*/ RETURN_END_MINUS_ONE);
6394
6395 if (ret)
6396 return ret;
6397
6398 if (TREE_CODE (len) == INTEGER_CST)
6399 {
6400 rtx len_rtx = expand_normal (len);
6401
6402 if (CONST_INT_P (len_rtx))
6403 {
6404 ret = expand_builtin_strcpy_args (exp, dst, src, target);
6405
6406 if (ret)
6407 {
6408 if (! target)
6409 {
6410 if (mode != VOIDmode)
6411 target = gen_reg_rtx (mode);
6412 else
6413 target = gen_reg_rtx (GET_MODE (ret));
6414 }
6415 if (GET_MODE (target) != GET_MODE (ret))
6416 ret = gen_lowpart (GET_MODE (target), ret);
6417
6418 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
6419 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
6420 gcc_assert (ret);
6421
6422 return target;
6423 }
6424 }
6425 }
6426
6427 return expand_movstr (dst, src, target,
6428 /*retmode=*/ RETURN_END_MINUS_ONE);
6429 }
6430 }
6431
6432 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6433 arguments while being careful to avoid duplicate warnings (which could
6434 be issued if the expander were to expand the call, resulting in it
6435 being emitted in expand_call()). */
6436
6437 static rtx
6438 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6439 {
6440 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6441 {
6442 /* The call has been successfully expanded. Check for nonstring
6443 arguments and issue warnings as appropriate. */
6444 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6445 return ret;
6446 }
6447
6448 return NULL_RTX;
6449 }
6450
6451 /* Check a call EXP to the stpncpy built-in for validity.
6452 Return NULL_RTX on both success and failure. */
6453
6454 static rtx
6455 expand_builtin_stpncpy (tree exp, rtx)
6456 {
6457 if (!validate_arglist (exp,
6458 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6459 || !warn_stringop_overflow)
6460 return NULL_RTX;
6461
6462 /* The source and destination of the call. */
6463 tree dest = CALL_EXPR_ARG (exp, 0);
6464 tree src = CALL_EXPR_ARG (exp, 1);
6465
6466 /* The exact number of bytes to write (not the maximum). */
6467 tree len = CALL_EXPR_ARG (exp, 2);
6468 access_data data (exp, access_read_write);
6469 /* The size of the destination object. */
6470 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6471 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
6472 return NULL_RTX;
6473 }
6474
6475 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6476 bytes from constant string DATA + OFFSET and return it as target
6477 constant. */
6478
6479 rtx
6480 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
6481 scalar_int_mode mode)
6482 {
6483 const char *str = (const char *) data;
6484
6485 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6486 return const0_rtx;
6487
6488 return c_readstr (str + offset, mode);
6489 }
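/* Editorial note: returning const0_rtx for offsets past the end of STR is
   what supplies strncpy's required zero padding when the requested length
   exceeds the source length (see expand_builtin_strncpy below).  */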
6490
6491 /* Helper to check the sizes of sequences and the destination of calls
6492 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6493 success (no overflow or invalid sizes), false otherwise. */
6494
6495 static bool
6496 check_strncat_sizes (tree exp, tree objsize)
6497 {
6498 tree dest = CALL_EXPR_ARG (exp, 0);
6499 tree src = CALL_EXPR_ARG (exp, 1);
6500 tree maxread = CALL_EXPR_ARG (exp, 2);
6501
6502 /* Try to determine the range of lengths that the source expression
6503 refers to. */
6504 c_strlen_data lendata = { };
6505 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6506
6507 /* Try to verify that the destination is big enough for the shortest
6508 string. */
6509
6510 access_data data (exp, access_read_write, maxread, true);
6511 if (!objsize && warn_stringop_overflow)
6512 {
6513 /* If it hasn't been provided by __strncat_chk, try to determine
6514 the size of the destination object into which the source is
6515 being copied. */
6516 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6517 }
6518
6519 /* Add one for the terminating nul. */
6520 tree srclen = (lendata.minlen
6521 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
6522 size_one_node)
6523 : NULL_TREE);
6524
6525 /* The strncat function copies at most MAXREAD bytes and always appends
6526 the terminating nul so the specified upper bound should never be equal
6527 to (or greater than) the size of the destination. */
6528 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6529 && tree_int_cst_equal (objsize, maxread))
6530 {
6531 location_t loc = EXPR_LOCATION (exp);
6532 warning_at (loc, OPT_Wstringop_overflow_,
6533 "%qD specified bound %E equals destination size",
6534 get_callee_fndecl (exp), maxread);
6535
6536 return false;
6537 }
6538
6539 if (!srclen
6540 || (maxread && tree_fits_uhwi_p (maxread)
6541 && tree_fits_uhwi_p (srclen)
6542 && tree_int_cst_lt (maxread, srclen)))
6543 srclen = maxread;
6544
6545 /* The number of bytes to write is LEN but check_access will also
6546 check SRCLEN if LEN's value isn't known. */
6547 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6548 objsize, data.mode, &data);
6549 }
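/* Editorial example: the equal-bound check above flags the common mistake
   `strncat (d, s, sizeof d)`; because strncat always appends a NUL, a
   bound equal to the destination size can overflow by one byte.  A safe
   idiom is `sizeof d - strlen (d) - 1`.  */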
6550
6551 /* Similar to expand_builtin_strcat, do some very basic size validation
6552 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
6553 the built-in expand to a call to the library function. */
6554
6555 static rtx
6556 expand_builtin_strncat (tree exp, rtx)
6557 {
6558 if (!validate_arglist (exp,
6559 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6560 || !warn_stringop_overflow)
6561 return NULL_RTX;
6562
6563 tree dest = CALL_EXPR_ARG (exp, 0);
6564 tree src = CALL_EXPR_ARG (exp, 1);
6565 /* The upper bound on the number of bytes to write. */
6566 tree maxread = CALL_EXPR_ARG (exp, 2);
6567
6568 /* Detect unterminated source (only). */
6569 if (!check_nul_terminated_array (exp, src, maxread))
6570 return NULL_RTX;
6571
6572 /* The length of the source sequence. */
6573 tree slen = c_strlen (src, 1);
6574
6575 /* Try to determine the range of lengths that the source expression
6576 refers to. Since the lengths are only used for warning and not
6577 for code generation, disable strict mode below. */
6578 tree maxlen = slen;
6579 if (!maxlen)
6580 {
6581 c_strlen_data lendata = { };
6582 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6583 maxlen = lendata.maxbound;
6584 }
6585
6586 access_data data (exp, access_read_write);
6587 /* Try to verify that the destination is big enough for the shortest
6588 string. First try to determine the size of the destination object
6589 into which the source is being copied. */
6590 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6591
6592 /* Add one for the terminating nul. */
6593 tree srclen = (maxlen
6594 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
6595 size_one_node)
6596 : NULL_TREE);
6597
6598 /* The strncat function copies at most MAXREAD bytes and always appends
6599 the terminating nul so the specified upper bound should never be equal
6600 to (or greater than) the size of the destination. */
6601 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6602 && tree_int_cst_equal (destsize, maxread))
6603 {
6604 location_t loc = EXPR_LOCATION (exp);
6605 warning_at (loc, OPT_Wstringop_overflow_,
6606 "%qD specified bound %E equals destination size",
6607 get_callee_fndecl (exp), maxread);
6608
6609 return NULL_RTX;
6610 }
6611
6612 if (!srclen
6613 || (maxread && tree_fits_uhwi_p (maxread)
6614 && tree_fits_uhwi_p (srclen)
6615 && tree_int_cst_lt (maxread, srclen)))
6616 srclen = maxread;
6617
6618 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6619 destsize, data.mode, &data);
6620 return NULL_RTX;
6621 }
6622
6623 /* Expand expression EXP, which is a call to the strncpy builtin. Return
6624 NULL_RTX if we failed; the caller should emit a normal call. */
6625
6626 static rtx
6627 expand_builtin_strncpy (tree exp, rtx target)
6628 {
6629 location_t loc = EXPR_LOCATION (exp);
6630
6631 if (!validate_arglist (exp,
6632 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6633 return NULL_RTX;
6634 tree dest = CALL_EXPR_ARG (exp, 0);
6635 tree src = CALL_EXPR_ARG (exp, 1);
6636 /* The number of bytes to write (not the maximum). */
6637 tree len = CALL_EXPR_ARG (exp, 2);
6638
6639 /* The length of the source sequence. */
6640 tree slen = c_strlen (src, 1);
6641
6642 if (warn_stringop_overflow)
6643 {
6644 access_data data (exp, access_read_write, len, true, len, true);
6645 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6646 compute_objsize (src, ost, &data.src);
6647 tree dstsize = compute_objsize (dest, ost, &data.dst);
6648 /* The number of bytes to write is LEN but check_access will also
6649 check SLEN if LEN's value isn't known. */
6650 check_access (exp, /*dstwrite=*/len,
6651 /*maxread=*/len, src, dstsize, data.mode, &data);
6652 }
6653
6654 /* We must be passed a constant len and src parameter. */
6655 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6656 return NULL_RTX;
6657
6658 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6659
6660 /* We're required to pad with trailing zeros if the requested
6661 len is greater than strlen(s2)+1. In that case try to
6662 use store_by_pieces; if it fails, punt. */
6663 if (tree_int_cst_lt (slen, len))
6664 {
6665 unsigned int dest_align = get_pointer_alignment (dest);
6666 const char *p = c_getstr (src);
6667 rtx dest_mem;
6668
6669 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6670 || !can_store_by_pieces (tree_to_uhwi (len),
6671 builtin_strncpy_read_str,
6672 CONST_CAST (char *, p),
6673 dest_align, false))
6674 return NULL_RTX;
6675
6676 dest_mem = get_memory_rtx (dest, len);
6677 store_by_pieces (dest_mem, tree_to_uhwi (len),
6678 builtin_strncpy_read_str,
6679 CONST_CAST (char *, p), dest_align, false,
6680 RETURN_BEGIN);
6681 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6682 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6683 return dest_mem;
6684 }
6685
6686 return NULL_RTX;
6687 }
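/* Editorial example of the padding path above: for `strncpy (d, "ab", 8)`
   SLEN + 1 is 3, which is less than the constant length 8, so
   store_by_pieces emits the two characters followed by six zero bytes
   inline, with no library call (assuming suitable alignment).  */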
6688
6689 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
6690 bytes from constant string DATA + OFFSET and return it as target
6691 constant. If PREV isn't nullptr, it has the RTL info from the
6692 previous iteration. */
6693
6694 rtx
6695 builtin_memset_read_str (void *data, void *prevp,
6696 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6697 scalar_int_mode mode)
6698 {
6699 by_pieces_prev *prev = (by_pieces_prev *) prevp;
6700 if (prev != nullptr && prev->data != nullptr)
6701 {
6702 /* Use the previous data in the same mode. */
6703 if (prev->mode == mode)
6704 return prev->data;
6705 }
6706
6707 const char *c = (const char *) data;
6708 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
6709
6710 memset (p, *c, GET_MODE_SIZE (mode));
6711
6712 return c_readstr (p, mode);
6713 }
6714
6715 /* Callback routine for store_by_pieces. Return the RTL of a register
6716 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6717 char value given in the RTL register data. For example, if mode is
6718 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
6719 nullptr, it has the RTL info from the previous iteration. */
6720
6721 static rtx
6722 builtin_memset_gen_str (void *data, void *prevp,
6723 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6724 scalar_int_mode mode)
6725 {
6726 rtx target, coeff;
6727 size_t size;
6728 char *p;
6729
6730 by_pieces_prev *prev = (by_pieces_prev *) prevp;
6731 if (prev != nullptr && prev->data != nullptr)
6732 {
6733 /* Use the previous data in the same mode. */
6734 if (prev->mode == mode)
6735 return prev->data;
6736
6737 target = simplify_gen_subreg (mode, prev->data, prev->mode, 0);
6738 if (target != nullptr)
6739 return target;
6740 }
6741
6742 size = GET_MODE_SIZE (mode);
6743 if (size == 1)
6744 return (rtx) data;
6745
6746 p = XALLOCAVEC (char, size);
6747 memset (p, 1, size);
6748 coeff = c_readstr (p, mode);
6749
6750 target = convert_to_mode (mode, (rtx) data, 1);
6751 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6752 return force_reg (mode, target);
6753 }
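/* Editorial note on the coefficient trick above: for a 4-byte MODE the
   buffer filled with 1s reads back as the constant 0x01010101, so
   multiplying the zero-extended byte value by it replicates that byte
   into every byte of the word, e.g. 0x5a becomes 0x5a5a5a5a.  */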
6754
6755 /* Expand expression EXP, which is a call to the memset builtin. Return
6756 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6757 try to get the result in TARGET, if convenient (and in mode MODE if that's
6758 convenient). */
6759
6760 static rtx
6761 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
6762 {
6763 if (!validate_arglist (exp,
6764 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6765 return NULL_RTX;
6766
6767 tree dest = CALL_EXPR_ARG (exp, 0);
6768 tree val = CALL_EXPR_ARG (exp, 1);
6769 tree len = CALL_EXPR_ARG (exp, 2);
6770
6771 check_memop_access (exp, dest, NULL_TREE, len);
6772
6773 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
6774 }
6775
6776 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
6777 Return TRUE if successful, FALSE otherwise. TO is assumed to be
6778 aligned at an ALIGN-bits boundary. LEN must be a multiple of
6779 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
6780
6781 The strategy is to issue one store_by_pieces for each power of two,
6782 from most to least significant, guarded by a test on whether there
6783 are at least that many bytes left to copy in LEN.
6784
6785 ??? Should we skip some powers of two in favor of loops? Maybe start
6786 at the max of TO/LEN/word alignment, at least when optimizing for
6787 size, instead of ensuring O(log len) dynamic compares? */
6788
6789 bool
6790 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
6791 unsigned HOST_WIDE_INT min_len,
6792 unsigned HOST_WIDE_INT max_len,
6793 rtx val, char valc, unsigned int align)
6794 {
6795 int max_bits = floor_log2 (max_len);
6796 int min_bits = floor_log2 (min_len);
6797 int sctz_len = ctz_len;
6798
6799 gcc_checking_assert (sctz_len >= 0);
6800
6801 if (val)
6802 valc = 1;
6803
6804 /* Bits more significant than TST_BITS are part of the shared prefix
6805 in the binary representation of both min_len and max_len. Since
6806 they're identical, we don't need to test them in the loop. */
6807 int tst_bits = (max_bits != min_bits ? max_bits
6808 : floor_log2 (max_len ^ min_len));
6809
6810 /* Check whether it's profitable to start by storing a fixed BLKSIZE
6811 bytes, to lower max_bits. In the unlikely case of a constant LEN
6812 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
6813 single store_by_pieces, but otherwise, select the minimum multiple
6814 of the ALIGN (in bytes) and of the GCD of the possible LENs, that
6815 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
6816 unsigned HOST_WIDE_INT blksize;
6817 if (max_len > min_len)
6818 {
6819 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
6820 align / BITS_PER_UNIT);
6821 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
6822 blksize &= ~(alrng - 1);
6823 }
6824 else if (max_len == min_len)
6825 blksize = max_len;
6826 else
6827 gcc_unreachable ();
6828 if (min_len >= blksize)
6829 {
6830 min_len -= blksize;
6831 min_bits = floor_log2 (min_len);
6832 max_len -= blksize;
6833 max_bits = floor_log2 (max_len);
6834
6835 tst_bits = (max_bits != min_bits ? max_bits
6836 : floor_log2 (max_len ^ min_len));
6837 }
6838 else
6839 blksize = 0;
6840
6841 /* Check that we can use store by pieces for the maximum store count
6842 we may issue (initial fixed-size block, plus conditional
6843 power-of-two-sized blocks from max_bits to ctz_len). */
6844 unsigned HOST_WIDE_INT xlenest = blksize;
6845 if (max_bits >= 0)
6846 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
6847 - (HOST_WIDE_INT_1U << ctz_len));
6848 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
6849 &valc, align, true))
6850 return false;
6851
6852 rtx (*constfun) (void *, void *, HOST_WIDE_INT, scalar_int_mode);
6853 void *constfundata;
6854 if (val)
6855 {
6856 constfun = builtin_memset_gen_str;
6857 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
6858 val);
6859 }
6860 else
6861 {
6862 constfun = builtin_memset_read_str;
6863 constfundata = &valc;
6864 }
6865
6866 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
6867 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
6868 to = replace_equiv_address (to, ptr);
6869 set_mem_align (to, align);
6870
6871 if (blksize)
6872 {
6873 to = store_by_pieces (to, blksize,
6874 constfun, constfundata,
6875 align, true,
6876 max_len != 0 ? RETURN_END : RETURN_BEGIN);
6877 if (max_len == 0)
6878 return true;
6879
6880 /* Adjust PTR, TO and REM. Since TO's address is likely
6881 PTR+offset, we have to replace it. */
6882 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
6883 to = replace_equiv_address (to, ptr);
6884 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
6885 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
6886 }
6887
6888 /* Iterate over power-of-two block sizes from the maximum length to
6889 the least significant bit possibly set in the length. */
6890 for (int i = max_bits; i >= sctz_len; i--)
6891 {
6892 rtx_code_label *label = NULL;
6893 blksize = HOST_WIDE_INT_1U << i;
6894
6895 /* If we're past the bits shared between min_ and max_len, expand
6896 a test on the dynamic length, comparing it with the
6897 BLKSIZE. */
6898 if (i <= tst_bits)
6899 {
6900 label = gen_label_rtx ();
6901 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
6902 ptr_mode, 1, label,
6903 profile_probability::even ());
6904 }
6905 /* If we are at a bit that is in the prefix shared by min_ and
6906 max_len, skip this BLKSIZE if the bit is clear. */
6907 else if ((max_len & blksize) == 0)
6908 continue;
6909
6910 /* Issue a store of BLKSIZE bytes. */
6911 to = store_by_pieces (to, blksize,
6912 constfun, constfundata,
6913 align, true,
6914 i != sctz_len ? RETURN_END : RETURN_BEGIN);
6915
6916 /* Adjust REM and PTR, unless this is the last iteration. */
6917 if (i != sctz_len)
6918 {
6919 emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
6920 to = replace_equiv_address (to, ptr);
6921 rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
6922 emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
6923 }
6924
6925 if (label)
6926 {
6927 emit_label (label);
6928
6929 /* Given conditional stores, the offset can no longer be
6930 known, so clear it. */
6931 clear_mem_offset (to);
6932 }
6933 }
6934
6935 return true;
6936 }
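
/* As a hypothetical example of the strategy above (parameters assumed
   for illustration): suppose LEN is known to be a multiple of 4 in the
   range [20, 32] (CTZ_LEN = 2, MIN_LEN = 20, MAX_LEN = 32) and TO is
   4-byte aligned.  Then MAX_BITS = 5, MIN_BITS = 4, TST_BITS = 5,
   ALRNG = 4 and BLKSIZE = 32 - 32 + 4 = 4, which does not exceed
   MIN_LEN, so a fixed 4-byte store is issued first, leaving the
   remaining length in [16, 28] with MAX_BITS = 4 and TST_BITS = 3.
   The loop then emits, roughly:

     store 16 bytes;               (unconditional: bit 4 is in the shared prefix)
     if (rem >= 8) store 8 bytes;  (conditional)
     if (rem >= 4) store 4 bytes;  (conditional)

   i.e. at most one test-guarded store per remaining power of two,
   which is the O(log len) behavior the remark above refers to.  */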
6937
6938 /* Helper function to do the actual work for expand_builtin_memset. The
6939 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6940 so that this can also be called without constructing an actual CALL_EXPR.
6941 The other arguments and return value are the same as for
6942 expand_builtin_memset. */
6943
6944 static rtx
6945 expand_builtin_memset_args (tree dest, tree val, tree len,
6946 rtx target, machine_mode mode, tree orig_exp)
6947 {
6948 tree fndecl, fn;
6949 enum built_in_function fcode;
6950 machine_mode val_mode;
6951 char c;
6952 unsigned int dest_align;
6953 rtx dest_mem, dest_addr, len_rtx;
6954 HOST_WIDE_INT expected_size = -1;
6955 unsigned int expected_align = 0;
6956 unsigned HOST_WIDE_INT min_size;
6957 unsigned HOST_WIDE_INT max_size;
6958 unsigned HOST_WIDE_INT probable_max_size;
6959
6960 dest_align = get_pointer_alignment (dest);
6961
6962 /* If DEST is not a pointer type, don't do this operation in-line. */
6963 if (dest_align == 0)
6964 return NULL_RTX;
6965
6966 if (currently_expanding_gimple_stmt)
6967 stringop_block_profile (currently_expanding_gimple_stmt,
6968 &expected_align, &expected_size);
6969
6970 if (expected_align < dest_align)
6971 expected_align = dest_align;
6972
6973 /* If the LEN parameter is zero, return DEST. */
6974 if (integer_zerop (len))
6975 {
6976 /* Evaluate and ignore VAL in case it has side-effects. */
6977 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6978 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6979 }
6980
6981 /* Stabilize the arguments in case we fail. */
6982 dest = builtin_save_expr (dest);
6983 val = builtin_save_expr (val);
6984 len = builtin_save_expr (len);
6985
6986 len_rtx = expand_normal (len);
6987 determine_block_size (len, len_rtx, &min_size, &max_size,
6988 &probable_max_size);
6989 dest_mem = get_memory_rtx (dest, len);
6990 val_mode = TYPE_MODE (unsigned_char_type_node);
6991
6992 if (TREE_CODE (val) != INTEGER_CST
6993 || target_char_cast (val, &c))
6994 {
6995 rtx val_rtx;
6996
6997 val_rtx = expand_normal (val);
6998 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
6999
7000 /* Assume that we can memset by pieces if we can store
7001 the coefficients by pieces (in the required modes).
7002 We can't pass builtin_memset_gen_str as that emits RTL. */
7003 c = 1;
7004 if (tree_fits_uhwi_p (len)
7005 && can_store_by_pieces (tree_to_uhwi (len),
7006 builtin_memset_read_str, &c, dest_align,
7007 true))
7008 {
7009 val_rtx = force_reg (val_mode, val_rtx);
7010 store_by_pieces (dest_mem, tree_to_uhwi (len),
7011 builtin_memset_gen_str, val_rtx, dest_align,
7012 true, RETURN_BEGIN);
7013 }
7014 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
7015 dest_align, expected_align,
7016 expected_size, min_size, max_size,
7017 probable_max_size)
7018 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
7019 tree_ctz (len),
7020 min_size, max_size,
7021 val_rtx, 0,
7022 dest_align))
7023 goto do_libcall;
7024
7025 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
7026 dest_mem = convert_memory_address (ptr_mode, dest_mem);
7027 return dest_mem;
7028 }
7029
7030 if (c)
7031 {
7032 if (tree_fits_uhwi_p (len)
7033 && can_store_by_pieces (tree_to_uhwi (len),
7034 builtin_memset_read_str, &c, dest_align,
7035 true))
7036 store_by_pieces (dest_mem, tree_to_uhwi (len),
7037 builtin_memset_read_str, &c, dest_align, true,
7038 RETURN_BEGIN);
7039 else if (!set_storage_via_setmem (dest_mem, len_rtx,
7040 gen_int_mode (c, val_mode),
7041 dest_align, expected_align,
7042 expected_size, min_size, max_size,
7043 probable_max_size)
7044 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
7045 tree_ctz (len),
7046 min_size, max_size,
7047 NULL_RTX, c,
7048 dest_align))
7049 goto do_libcall;
7050
7051 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
7052 dest_mem = convert_memory_address (ptr_mode, dest_mem);
7053 return dest_mem;
7054 }
7055
7056 set_mem_align (dest_mem, dest_align);
7057 dest_addr = clear_storage_hints (dest_mem, len_rtx,
7058 CALL_EXPR_TAILCALL (orig_exp)
7059 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
7060 expected_align, expected_size,
7061 min_size, max_size,
7062 probable_max_size, tree_ctz (len));
7063
7064 if (dest_addr == 0)
7065 {
7066 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
7067 dest_addr = convert_memory_address (ptr_mode, dest_addr);
7068 }
7069
7070 return dest_addr;
7071
7072 do_libcall:
7073 fndecl = get_callee_fndecl (orig_exp);
7074 fcode = DECL_FUNCTION_CODE (fndecl);
7075 if (fcode == BUILT_IN_MEMSET)
7076 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
7077 dest, val, len);
7078 else if (fcode == BUILT_IN_BZERO)
7079 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
7080 dest, len);
7081 else
7082 gcc_unreachable ();
7083 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7084 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
7085 return expand_call (fn, target, target == const0_rtx);
7086 }
7087
7088 /* Expand expression EXP, which is a call to the bzero builtin. Return
7089 NULL_RTX if we failed; the caller should then emit a normal call. */
7090
7091 static rtx
7092 expand_builtin_bzero (tree exp)
7093 {
7094 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7095 return NULL_RTX;
7096
7097 tree dest = CALL_EXPR_ARG (exp, 0);
7098 tree size = CALL_EXPR_ARG (exp, 1);
7099
7100 check_memop_access (exp, dest, NULL_TREE, size);
7101
7102 /* New argument list transforming bzero(ptr x, int y) to
7103 memset(ptr x, int 0, size_t y). This is done this way
7104 so that if it isn't expanded inline, we fall back to
7105 calling bzero instead of memset. */
7106
7107 location_t loc = EXPR_LOCATION (exp);
7108
7109 return expand_builtin_memset_args (dest, integer_zero_node,
7110 fold_convert_loc (loc,
7111 size_type_node, size),
7112 const0_rtx, VOIDmode, exp);
7113 }
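
/* For example, a source-level call such as

     bzero (p, n);

   is expanded here as if it were memset (p, 0, (size_t) n); if none of
   the inline strategies apply, the do_libcall path in
   expand_builtin_memset_args rebuilds a call to bzero itself (the
   BUILT_IN_BZERO case), so no memset symbol is introduced behind the
   user's back.  */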
7114
7115 /* Try to expand cmpstr operation ICODE with the given operands.
7116 Return the result rtx on success, otherwise return null. */
7117
7118 static rtx
7119 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
7120 HOST_WIDE_INT align)
7121 {
7122 machine_mode insn_mode = insn_data[icode].operand[0].mode;
7123
7124 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
7125 target = NULL_RTX;
7126
7127 class expand_operand ops[4];
7128 create_output_operand (&ops[0], target, insn_mode);
7129 create_fixed_operand (&ops[1], arg1_rtx);
7130 create_fixed_operand (&ops[2], arg2_rtx);
7131 create_integer_operand (&ops[3], align);
7132 if (maybe_expand_insn (icode, 4, ops))
7133 return ops[0].value;
7134 return NULL_RTX;
7135 }
7136
7137 /* Expand expression EXP, which is a call to the memcmp built-in function.
7138 Return NULL_RTX if we failed and the caller should emit a normal call,
7139 otherwise try to get the result in TARGET, if convenient.
7140 RESULT_EQ is true if we can relax the returned value to be either zero
7141 or nonzero, without caring about the sign. */
7142
7143 static rtx
7144 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
7145 {
7146 if (!validate_arglist (exp,
7147 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7148 return NULL_RTX;
7149
7150 tree arg1 = CALL_EXPR_ARG (exp, 0);
7151 tree arg2 = CALL_EXPR_ARG (exp, 1);
7152 tree len = CALL_EXPR_ARG (exp, 2);
7153
7154 /* Diagnose calls where the specified length exceeds the size of either
7155 object. */
7156 if (!check_read_access (exp, arg1, len, 0)
7157 || !check_read_access (exp, arg2, len, 0))
7158 return NULL_RTX;
7159
7160 /* Due to the performance benefit, always inline the calls first
7161 when result_eq is false. */
7162 rtx result = NULL_RTX;
7163 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
7164 if (!result_eq && fcode != BUILT_IN_BCMP)
7165 {
7166 result = inline_expand_builtin_bytecmp (exp, target);
7167 if (result)
7168 return result;
7169 }
7170
7171 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7172 location_t loc = EXPR_LOCATION (exp);
7173
7174 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7175 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7176
7177 /* If we don't have POINTER_TYPE, call the function. */
7178 if (arg1_align == 0 || arg2_align == 0)
7179 return NULL_RTX;
7180
7181 rtx arg1_rtx = get_memory_rtx (arg1, len);
7182 rtx arg2_rtx = get_memory_rtx (arg2, len);
7183 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
7184
7185 /* Set MEM_SIZE as appropriate. */
7186 if (CONST_INT_P (len_rtx))
7187 {
7188 set_mem_size (arg1_rtx, INTVAL (len_rtx));
7189 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7190 }
7191
7192 by_pieces_constfn constfn = NULL;
7193
7194 /* Try to get the byte representation of the constant that ARG2 (or,
7195 only when the function's result is used for equality to zero, ARG1)
7196 points to, with its byte size in NBYTES. */
7197 unsigned HOST_WIDE_INT nbytes;
7198 const char *rep = getbyterep (arg2, &nbytes);
7199 if (result_eq && rep == NULL)
7200 {
7201 /* For equality to zero the arguments are interchangeable. */
7202 rep = getbyterep (arg1, &nbytes);
7203 if (rep != NULL)
7204 std::swap (arg1_rtx, arg2_rtx);
7205 }
7206
7207 /* If the function's constant bound LEN_RTX is less than or equal
7208 to the byte size of the representation of the constant argument,
7209 and if block move would be done by pieces, we can avoid loading
7210 the bytes from memory and only store the computed constant result. */
7211 if (rep
7212 && CONST_INT_P (len_rtx)
7213 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
7214 constfn = builtin_memcpy_read_str;
7215
7216 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
7217 TREE_TYPE (len), target,
7218 result_eq, constfn,
7219 CONST_CAST (char *, rep));
7220
7221 if (result)
7222 {
7223 /* Return the value in the proper mode for this function. */
7224 if (GET_MODE (result) == mode)
7225 return result;
7226
7227 if (target != 0)
7228 {
7229 convert_move (target, result, 0);
7230 return target;
7231 }
7232
7233 return convert_to_mode (mode, result, 0);
7234 }
7235
7236 return NULL_RTX;
7237 }
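
/* As an illustration, for a call like

     memcmp (buf, "abc", 3)

   getbyterep returns the bytes of the string literal, the constant
   bound 3 does not exceed the size of that representation, and so
   CONSTFN lets emit_block_cmp_hints compare BUF against the literal's
   bytes directly instead of loading them from memory.  */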
7238
7239 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
7240 if we failed; the caller should then emit a normal call. Otherwise try to get
7241 the result in TARGET, if convenient. */
7242
7243 static rtx
7244 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
7245 {
7246 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7247 return NULL_RTX;
7248
7249 tree arg1 = CALL_EXPR_ARG (exp, 0);
7250 tree arg2 = CALL_EXPR_ARG (exp, 1);
7251
7252 if (!check_read_access (exp, arg1)
7253 || !check_read_access (exp, arg2))
7254 return NULL_RTX;
7255
7256 /* Due to the performance benefit, always inline the calls first. */
7257 rtx result = NULL_RTX;
7258 result = inline_expand_builtin_bytecmp (exp, target);
7259 if (result)
7260 return result;
7261
7262 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
7263 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7264 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
7265 return NULL_RTX;
7266
7267 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7268 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7269
7270 /* If we don't have POINTER_TYPE, call the function. */
7271 if (arg1_align == 0 || arg2_align == 0)
7272 return NULL_RTX;
7273
7274 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
7275 arg1 = builtin_save_expr (arg1);
7276 arg2 = builtin_save_expr (arg2);
7277
7278 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
7279 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
7280
7281 /* Try to call cmpstrsi. */
7282 if (cmpstr_icode != CODE_FOR_nothing)
7283 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
7284 MIN (arg1_align, arg2_align));
7285
7286 /* Try to determine at least one length and call cmpstrnsi. */
7287 if (!result && cmpstrn_icode != CODE_FOR_nothing)
7288 {
7289 tree len;
7290 rtx arg3_rtx;
7291
7292 tree len1 = c_strlen (arg1, 1);
7293 tree len2 = c_strlen (arg2, 1);
7294
7295 if (len1)
7296 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
7297 if (len2)
7298 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
7299
7300 /* If we don't have a constant length for the first, use the length
7301 of the second, if we know it. We don't require a constant for
7302 this case; some cost analysis could be done if both are available
7303 but neither is constant. For now, assume they're equally cheap,
7304 unless one has side effects. If both strings have constant lengths,
7305 use the smaller. */
7306
7307 if (!len1)
7308 len = len2;
7309 else if (!len2)
7310 len = len1;
7311 else if (TREE_SIDE_EFFECTS (len1))
7312 len = len2;
7313 else if (TREE_SIDE_EFFECTS (len2))
7314 len = len1;
7315 else if (TREE_CODE (len1) != INTEGER_CST)
7316 len = len2;
7317 else if (TREE_CODE (len2) != INTEGER_CST)
7318 len = len1;
7319 else if (tree_int_cst_lt (len1, len2))
7320 len = len1;
7321 else
7322 len = len2;
7323
7324 /* If both arguments have side effects, we cannot optimize. */
7325 if (len && !TREE_SIDE_EFFECTS (len))
7326 {
7327 arg3_rtx = expand_normal (len);
7328 result = expand_cmpstrn_or_cmpmem
7329 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
7330 arg3_rtx, MIN (arg1_align, arg2_align));
7331 }
7332 }
7333
7334 tree fndecl = get_callee_fndecl (exp);
7335 if (result)
7336 {
7337 /* Check to see if the argument was declared attribute nonstring
7338 and if so, issue a warning since at this point it's not known
7339 to be nul-terminated. */
7340 maybe_warn_nonstring_arg (fndecl, exp);
7341
7342 /* Return the value in the proper mode for this function. */
7343 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7344 if (GET_MODE (result) == mode)
7345 return result;
7346 if (target == 0)
7347 return convert_to_mode (mode, result, 0);
7348 convert_move (target, result, 0);
7349 return target;
7350 }
7351
7352 /* Expand the library call ourselves using a stabilized argument
7353 list to avoid re-evaluating the function's arguments twice. */
7354 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
7355 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7356 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
7357 return expand_call (fn, target, target == const0_rtx);
7358 }
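
/* As an illustration of the length selection above, for

     strcmp (s, "hello")

   c_strlen gives 5 for the second argument, so LEN becomes 6 (the
   string plus its terminating nul), and on a target that provides
   cmpstrnsi but no usable cmpstrsi the comparison is emitted as a
   bounded compare of at most 6 bytes.  */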
7359
7360 /* Expand expression EXP, which is a call to the strncmp builtin. Return
7361 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
7362 try to get the result in TARGET, if convenient. */
7363
7364 static rtx
7365 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
7366 ATTRIBUTE_UNUSED machine_mode mode)
7367 {
7368 if (!validate_arglist (exp,
7369 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7370 return NULL_RTX;
7371
7372 tree arg1 = CALL_EXPR_ARG (exp, 0);
7373 tree arg2 = CALL_EXPR_ARG (exp, 1);
7374 tree arg3 = CALL_EXPR_ARG (exp, 2);
7375
7376 if (!check_nul_terminated_array (exp, arg1, arg3)
7377 || !check_nul_terminated_array (exp, arg2, arg3))
7378 return NULL_RTX;
7379
7380 location_t loc = EXPR_LOCATION (exp);
7381 tree len1 = c_strlen (arg1, 1);
7382 tree len2 = c_strlen (arg2, 1);
7383
7384 if (!len1 || !len2)
7385 {
7386 /* Check to see if the argument was declared attribute nonstring
7387 and if so, issue a warning since at this point it's not known
7388 to be nul-terminated. */
7389 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
7390 && !len1 && !len2)
7391 {
7392 /* A strncmp read is constrained not just by the bound but
7393 also by the length of the shorter string. Specifying
7394 a bound that's larger than the size of either array makes
7395 no sense and is likely a bug. When the length of neither
7396 of the two strings is known but the sizes of both of
7397 the arrays they are stored in are, issue a warning if
7398 the bound is larger than the size of the larger
7399 of the two arrays. */
7400
7401 access_ref ref1 (arg3, true);
7402 access_ref ref2 (arg3, true);
7403
7404 tree bndrng[2] = { NULL_TREE, NULL_TREE };
7405 get_size_range (arg3, bndrng, ref1.bndrng);
7406
7407 tree size1 = compute_objsize (arg1, 1, &ref1);
7408 tree size2 = compute_objsize (arg2, 1, &ref2);
7409 tree func = get_callee_fndecl (exp);
7410
7411 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
7412 {
7413 offset_int rem1 = ref1.size_remaining ();
7414 offset_int rem2 = ref2.size_remaining ();
7415 if (rem1 == 0 || rem2 == 0)
7416 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7417 bndrng, integer_zero_node);
7418 else
7419 {
7420 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
7421 if (maxrem < wi::to_offset (bndrng[0]))
7422 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
7423 func, bndrng,
7424 wide_int_to_tree (sizetype, maxrem));
7425 }
7426 }
7427 else if (bndrng[0]
7428 && !integer_zerop (bndrng[0])
7429 && ((size1 && integer_zerop (size1))
7430 || (size2 && integer_zerop (size2))))
7431 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7432 bndrng, integer_zero_node);
7433 }
7434 }
7435
7436 /* Due to the performance benefit, always inline the calls first. */
7437 rtx result = NULL_RTX;
7438 result = inline_expand_builtin_bytecmp (exp, target);
7439 if (result)
7440 return result;
7441
7442 /* If c_strlen can determine an expression for one of the string
7443 lengths, and it doesn't have side effects, then emit cmpstrnsi
7444 using length MIN(strlen(string)+1, arg3). */
7445 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7446 if (cmpstrn_icode == CODE_FOR_nothing)
7447 return NULL_RTX;
7448
7449 tree len;
7450
7451 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7452 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7453
7454 if (len1)
7455 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7456 if (len2)
7457 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7458
7459 tree len3 = fold_convert_loc (loc, sizetype, arg3);
7460
7461 /* If we don't have a constant length for the first, use the length
7462 of the second, if we know it. If neither string is constant length,
7463 use the given length argument. We don't require a constant for
7464 this case; some cost analysis could be done if both are available
7465 but neither is constant. For now, assume they're equally cheap,
7466 unless one has side effects. If both strings have constant lengths,
7467 use the smaller. */
7468
7469 if (!len1 && !len2)
7470 len = len3;
7471 else if (!len1)
7472 len = len2;
7473 else if (!len2)
7474 len = len1;
7475 else if (TREE_SIDE_EFFECTS (len1))
7476 len = len2;
7477 else if (TREE_SIDE_EFFECTS (len2))
7478 len = len1;
7479 else if (TREE_CODE (len1) != INTEGER_CST)
7480 len = len2;
7481 else if (TREE_CODE (len2) != INTEGER_CST)
7482 len = len1;
7483 else if (tree_int_cst_lt (len1, len2))
7484 len = len1;
7485 else
7486 len = len2;
7487
7488 /* If we are not using the given length, we must incorporate it here.
7489 The actual new length parameter will be MIN(len,arg3) in this case. */
7490 if (len != len3)
7491 {
7492 len = fold_convert_loc (loc, sizetype, len);
7493 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
7494 }
7495 rtx arg1_rtx = get_memory_rtx (arg1, len);
7496 rtx arg2_rtx = get_memory_rtx (arg2, len);
7497 rtx arg3_rtx = expand_normal (len);
7498 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
7499 arg2_rtx, TREE_TYPE (len), arg3_rtx,
7500 MIN (arg1_align, arg2_align));
7501
7502 tree fndecl = get_callee_fndecl (exp);
7503 if (result)
7504 {
7505 /* Return the value in the proper mode for this function. */
7506 mode = TYPE_MODE (TREE_TYPE (exp));
7507 if (GET_MODE (result) == mode)
7508 return result;
7509 if (target == 0)
7510 return convert_to_mode (mode, result, 0);
7511 convert_move (target, result, 0);
7512 return target;
7513 }
7514
7515 /* Expand the library call ourselves using a stabilized argument
7516 list to avoid re-evaluating the function's arguments twice. */
7517 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
7518 copy_warning (call, exp);
7519 gcc_assert (TREE_CODE (call) == CALL_EXPR);
7520 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
7521 return expand_call (call, target, target == const0_rtx);
7522 }
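
/* As an illustration of the length computation above, for

     strncmp (s, "hi", 8)

   c_strlen gives 2 for the second argument, LEN becomes 3, and since
   that is not the user-supplied bound, the expansion passes
   MIN (3, (size_t) 8) = 3 as the length to cmpstrnsi, when the target
   provides that pattern.  */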
7523
7524 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
7525 if that's convenient. */
7526
7527 rtx
7528 expand_builtin_saveregs (void)
7529 {
7530 rtx val;
7531 rtx_insn *seq;
7532
7533 /* Don't do __builtin_saveregs more than once in a function.
7534 Save the result of the first call and reuse it. */
7535 if (saveregs_value != 0)
7536 return saveregs_value;
7537
7538 /* When this function is called, it means that registers must be
7539 saved on entry to this function. So we migrate the call to the
7540 first insn of this function. */
7541
7542 start_sequence ();
7543
7544 /* Do whatever the machine needs done in this case. */
7545 val = targetm.calls.expand_builtin_saveregs ();
7546
7547 seq = get_insns ();
7548 end_sequence ();
7549
7550 saveregs_value = val;
7551
7552 /* Put the insns after the NOTE that starts the function. If this
7553 is inside a start_sequence, make the outer-level insn chain current, so
7554 the code is placed at the start of the function. */
7555 push_topmost_sequence ();
7556 emit_insn_after (seq, entry_of_function ());
7557 pop_topmost_sequence ();
7558
7559 return val;
7560 }
7561
7562 /* Expand a call to __builtin_next_arg. */
7563
7564 static rtx
7565 expand_builtin_next_arg (void)
7566 {
7567 /* Checking arguments is already done in fold_builtin_next_arg
7568 that must be called before this function. */
7569 return expand_binop (ptr_mode, add_optab,
7570 crtl->args.internal_arg_pointer,
7571 crtl->args.arg_offset_rtx,
7572 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7573 }
7574
7575 /* Make it easier for the backends by protecting the valist argument
7576 from multiple evaluations. */
7577
7578 static tree
7579 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
7580 {
7581 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
7582
7583 /* The current way of determining the type of valist is completely
7584 bogus. We should have the information on the va builtin instead. */
7585 if (!vatype)
7586 vatype = targetm.fn_abi_va_list (cfun->decl);
7587
7588 if (TREE_CODE (vatype) == ARRAY_TYPE)
7589 {
7590 if (TREE_SIDE_EFFECTS (valist))
7591 valist = save_expr (valist);
7592
7593 /* For this case, the backends will be expecting a pointer to
7594 vatype, but it's possible we've actually been given an array
7595 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
7596 So fix it. */
7597 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
7598 {
7599 tree p1 = build_pointer_type (TREE_TYPE (vatype));
7600 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
7601 }
7602 }
7603 else
7604 {
7605 tree pt = build_pointer_type (vatype);
7606
7607 if (! needs_lvalue)
7608 {
7609 if (! TREE_SIDE_EFFECTS (valist))
7610 return valist;
7611
7612 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
7613 TREE_SIDE_EFFECTS (valist) = 1;
7614 }
7615
7616 if (TREE_SIDE_EFFECTS (valist))
7617 valist = save_expr (valist);
7618 valist = fold_build2_loc (loc, MEM_REF,
7619 vatype, valist, build_int_cst (pt, 0));
7620 }
7621
7622 return valist;
7623 }
7624
7625 /* The "standard" definition of va_list is void*. */
7626
7627 tree
7628 std_build_builtin_va_list (void)
7629 {
7630 return ptr_type_node;
7631 }
7632
7633 /* The "standard" abi va_list is va_list_type_node. */
7634
7635 tree
7636 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
7637 {
7638 return va_list_type_node;
7639 }
7640
7641 /* The "standard" type of va_list is va_list_type_node. */
7642
7643 tree
7644 std_canonical_va_list_type (tree type)
7645 {
7646 tree wtype, htype;
7647
7648 wtype = va_list_type_node;
7649 htype = type;
7650
7651 if (TREE_CODE (wtype) == ARRAY_TYPE)
7652 {
7653 /* If va_list is an array type, the argument may have decayed
7654 to a pointer type, e.g. by being passed to another function.
7655 In that case, unwrap both types so that we can compare the
7656 underlying records. */
7657 if (TREE_CODE (htype) == ARRAY_TYPE
7658 || POINTER_TYPE_P (htype))
7659 {
7660 wtype = TREE_TYPE (wtype);
7661 htype = TREE_TYPE (htype);
7662 }
7663 }
7664 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7665 return va_list_type_node;
7666
7667 return NULL_TREE;
7668 }
7669
7670 /* The "standard" implementation of va_start: just assign `nextarg' to
7671 the variable. */
7672
7673 void
7674 std_expand_builtin_va_start (tree valist, rtx nextarg)
7675 {
7676 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
7677 convert_move (va_r, nextarg, 0);
7678 }
7679
7680 /* Expand EXP, a call to __builtin_va_start. */
7681
7682 static rtx
7683 expand_builtin_va_start (tree exp)
7684 {
7685 rtx nextarg;
7686 tree valist;
7687 location_t loc = EXPR_LOCATION (exp);
7688
7689 if (call_expr_nargs (exp) < 2)
7690 {
7691 error_at (loc, "too few arguments to function %<va_start%>");
7692 return const0_rtx;
7693 }
7694
7695 if (fold_builtin_next_arg (exp, true))
7696 return const0_rtx;
7697
7698 nextarg = expand_builtin_next_arg ();
7699 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
7700
7701 if (targetm.expand_builtin_va_start)
7702 targetm.expand_builtin_va_start (valist, nextarg);
7703 else
7704 std_expand_builtin_va_start (valist, nextarg);
7705
7706 return const0_rtx;
7707 }
7708
7709 /* Expand EXP, a call to __builtin_va_end. */
7710
7711 static rtx
7712 expand_builtin_va_end (tree exp)
7713 {
7714 tree valist = CALL_EXPR_ARG (exp, 0);
7715
7716 /* Evaluate for side effects, if needed. I hate macros that don't
7717 do that. */
7718 if (TREE_SIDE_EFFECTS (valist))
7719 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
7720
7721 return const0_rtx;
7722 }
7723
7724 /* Expand EXP, a call to __builtin_va_copy. We do this as a
7725 builtin rather than just as an assignment in stdarg.h because of the
7726 nastiness of array-type va_list types. */
7727
7728 static rtx
7729 expand_builtin_va_copy (tree exp)
7730 {
7731 tree dst, src, t;
7732 location_t loc = EXPR_LOCATION (exp);
7733
7734 dst = CALL_EXPR_ARG (exp, 0);
7735 src = CALL_EXPR_ARG (exp, 1);
7736
7737 dst = stabilize_va_list_loc (loc, dst, 1);
7738 src = stabilize_va_list_loc (loc, src, 0);
7739
7740 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
7741
7742 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
7743 {
7744 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
7745 TREE_SIDE_EFFECTS (t) = 1;
7746 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7747 }
7748 else
7749 {
7750 rtx dstb, srcb, size;
7751
7752 /* Evaluate to pointers. */
7753 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
7754 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
7755 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
7756 NULL_RTX, VOIDmode, EXPAND_NORMAL);
7757
7758 dstb = convert_memory_address (Pmode, dstb);
7759 srcb = convert_memory_address (Pmode, srcb);
7760
7761 /* "Dereference" to BLKmode memories. */
7762 dstb = gen_rtx_MEM (BLKmode, dstb);
7763 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
7764 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7765 srcb = gen_rtx_MEM (BLKmode, srcb);
7766 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
7767 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7768
7769 /* Copy. */
7770 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
7771 }
7772
7773 return const0_rtx;
7774 }
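
/* For instance, user code such as

     va_list ap2;
     va_copy (ap2, ap);

   takes the simple-assignment path above on targets whose ABI va_list
   is a scalar (e.g. a plain pointer), and the block-move path on
   targets where it is an array of a record type (the usual x86-64
   case), copying TYPE_SIZE_UNIT bytes of the underlying structure.  */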
7775
7776 /* Expand a call to one of the builtin functions __builtin_frame_address or
7777 __builtin_return_address. */
7778
7779 static rtx
7780 expand_builtin_frame_address (tree fndecl, tree exp)
7781 {
7782 /* The argument must be a nonnegative integer constant.
7783 It counts the number of frames to scan up the stack.
7784 The value is either the frame pointer value or the return
7785 address saved in that frame. */
7786 if (call_expr_nargs (exp) == 0)
7787 /* Warning about missing arg was already issued. */
7788 return const0_rtx;
7789 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
7790 {
7791 error ("invalid argument to %qD", fndecl);
7792 return const0_rtx;
7793 }
7794 else
7795 {
7796 /* Number of frames to scan up the stack. */
7797 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
7798
7799 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
7800
7801 /* Some ports cannot access arbitrary stack frames. */
7802 if (tem == NULL)
7803 {
7804 warning (0, "unsupported argument to %qD", fndecl);
7805 return const0_rtx;
7806 }
7807
7808 if (count)
7809 {
7810 /* Warn since no effort is made to ensure that any frame
7811 beyond the current one exists or can be safely reached. */
7812 warning (OPT_Wframe_address, "calling %qD with "
7813 "a nonzero argument is unsafe", fndecl);
7814 }
7815
7816 /* For __builtin_frame_address, return what we've got. */
7817 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7818 return tem;
7819
7820 if (!REG_P (tem)
7821 && ! CONSTANT_P (tem))
7822 tem = copy_addr_to_reg (tem);
7823 return tem;
7824 }
7825 }
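
/* For example,

     void *ra = __builtin_return_address (0);

   expands directly, whereas __builtin_frame_address (2) additionally
   triggers the -Wframe-address warning above, and an argument that is
   not a nonnegative integer constant is rejected with an error, since
   nothing guarantees that frames beyond the current one exist or can
   be reached safely.  */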
7826
7827 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
7828 failed and the caller should emit a normal call. */
7829
7830 static rtx
7831 expand_builtin_alloca (tree exp)
7832 {
7833 rtx op0;
7834 rtx result;
7835 unsigned int align;
7836 tree fndecl = get_callee_fndecl (exp);
7837 HOST_WIDE_INT max_size;
7838 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7839 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
7840 bool valid_arglist
7841 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7842 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
7843 VOID_TYPE)
7844 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
7845 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
7846 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
7847
7848 if (!valid_arglist)
7849 return NULL_RTX;
7850
7851 if ((alloca_for_var
7852 && warn_vla_limit >= HOST_WIDE_INT_MAX
7853 && warn_alloc_size_limit < warn_vla_limit)
7854 || (!alloca_for_var
7855 && warn_alloca_limit >= HOST_WIDE_INT_MAX
7856 && warn_alloc_size_limit < warn_alloca_limit
7857 ))
7858 {
7859 /* -Walloca-larger-than and -Wvla-larger-than settings of
7860 less than HOST_WIDE_INT_MAX override the more general
7861 -Walloc-size-larger-than so unless either of the former
7862 options is smaller than the last one (which would imply
7863 that the call was already checked), check the alloca
7864 arguments for overflow. */
7865 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
7866 int idx[] = { 0, -1 };
7867 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
7868 }
7869
7870 /* Compute the argument. */
7871 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
7872
7873 /* Compute the alignment. */
7874 align = (fcode == BUILT_IN_ALLOCA
7875 ? BIGGEST_ALIGNMENT
7876 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
7877
7878 /* Compute the maximum size. */
7879 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7880 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
7881 : -1);
7882
7883 /* Allocate the desired space. If the allocation stems from the declaration
7884 of a variable-sized object, it cannot accumulate. */
7885 result
7886 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
7887 result = convert_memory_address (ptr_mode, result);
7888
7889 /* Dynamic allocations for variables are recorded during gimplification. */
7890 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
7891 record_dynamic_alloc (exp);
7892
7893 return result;
7894 }
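
/* Note on the alignment computation above: for plain __builtin_alloca
   the requested alignment is BIGGEST_ALIGNMENT (a bit count), and for
   the _with_align variants the second argument is used directly, so a
   call such as

     void *p = __builtin_alloca_with_align (n, 256);

   requests 256-bit (32-byte) alignment from
   allocate_dynamic_stack_space.  */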
7895
7896 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
7897 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
7898 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment on the
7899 handle_builtin_stack_restore function. */
7900
7901 static rtx
7902 expand_asan_emit_allocas_unpoison (tree exp)
7903 {
7904 tree arg0 = CALL_EXPR_ARG (exp, 0);
7905 tree arg1 = CALL_EXPR_ARG (exp, 1);
7906 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7907 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7908 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
7909 stack_pointer_rtx, NULL_RTX, 0,
7910 OPTAB_LIB_WIDEN);
7911 off = convert_modes (ptr_mode, Pmode, off, 0);
7912 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
7913 OPTAB_LIB_WIDEN);
7914 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
7915 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
7916 top, ptr_mode, bot, ptr_mode);
7917 return ret;
7918 }
7919
7920 /* Expand a call to bswap builtin in EXP.
7921 Return NULL_RTX if a normal call should be emitted rather than expanding the
7922 function in-line. If convenient, the result should be placed in TARGET.
7923 SUBTARGET may be used as the target for computing one of EXP's operands. */
7924
7925 static rtx
7926 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
7927 rtx subtarget)
7928 {
7929 tree arg;
7930 rtx op0;
7931
7932 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7933 return NULL_RTX;
7934
7935 arg = CALL_EXPR_ARG (exp, 0);
7936 op0 = expand_expr (arg,
7937 subtarget && GET_MODE (subtarget) == target_mode
7938 ? subtarget : NULL_RTX,
7939 target_mode, EXPAND_NORMAL);
7940 if (GET_MODE (op0) != target_mode)
7941 op0 = convert_to_mode (target_mode, op0, 1);
7942
7943 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
7944
7945 gcc_assert (target);
7946
7947 return convert_to_mode (target_mode, target, 1);
7948 }
7949
7950 /* Expand a call to a unary builtin in EXP.
7951 Return NULL_RTX if a normal call should be emitted rather than expanding the
7952 function in-line. If convenient, the result should be placed in TARGET.
7953 SUBTARGET may be used as the target for computing one of EXP's operands. */
7954
7955 static rtx
7956 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
7957 rtx subtarget, optab op_optab)
7958 {
7959 rtx op0;
7960
7961 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7962 return NULL_RTX;
7963
7964 /* Compute the argument. */
7965 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
7966 (subtarget
7967 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
7968 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
7969 VOIDmode, EXPAND_NORMAL);
7970 /* Compute op, into TARGET if possible.
7971 Set TARGET to wherever the result comes back. */
7972 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
7973 op_optab, op0, target, op_optab != clrsb_optab);
7974 gcc_assert (target);
7975
7976 return convert_to_mode (target_mode, target, 0);
7977 }
7978
7979 /* Expand a call to __builtin_expect. We just return our argument
7980 as the builtin_expect semantics should already have been applied by
7981 the tree branch prediction pass. */
7982
7983 static rtx
7984 expand_builtin_expect (tree exp, rtx target)
7985 {
7986 tree arg;
7987
7988 if (call_expr_nargs (exp) < 2)
7989 return const0_rtx;
7990 arg = CALL_EXPR_ARG (exp, 0);
7991
7992 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7993 /* When guessing was done, the hints should be already stripped away. */
7994 gcc_assert (!flag_guess_branch_prob
7995 || optimize == 0 || seen_error ());
7996 return target;
7997 }
7998
7999 /* Expand a call to __builtin_expect_with_probability. We just return our
8000 argument as the builtin_expect semantics should already have been applied by
8001 the tree branch prediction pass. */
8002
8003 static rtx
8004 expand_builtin_expect_with_probability (tree exp, rtx target)
8005 {
8006 tree arg;
8007
8008 if (call_expr_nargs (exp) < 3)
8009 return const0_rtx;
8010 arg = CALL_EXPR_ARG (exp, 0);
8011
8012 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8013 /* When guessing was done, the hints should be already stripped away. */
8014 gcc_assert (!flag_guess_branch_prob
8015 || optimize == 0 || seen_error ());
8016 return target;
8017 }
8018
8019
8020 /* Expand a call to __builtin_assume_aligned. We just return our first
8021 argument as the builtin_assume_aligned semantics should already have been
8022 applied by CCP. */
8023
8024 static rtx
8025 expand_builtin_assume_aligned (tree exp, rtx target)
8026 {
8027 if (call_expr_nargs (exp) < 2)
8028 return const0_rtx;
8029 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
8030 EXPAND_NORMAL);
8031 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
8032 && (call_expr_nargs (exp) < 3
8033 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
8034 return target;
8035 }
8036
8037 void
8038 expand_builtin_trap (void)
8039 {
8040 if (targetm.have_trap ())
8041 {
8042 rtx_insn *insn = emit_insn (targetm.gen_trap ());
8043 /* For trap insns, when not accumulating outgoing args, force a
8044 REG_ARGS_SIZE note to prevent crossjumping of calls with
8045 different arg sizes. */
8046 if (!ACCUMULATE_OUTGOING_ARGS)
8047 add_args_size_note (insn, stack_pointer_delta);
8048 }
8049 else
8050 {
8051 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
8052 tree call_expr = build_call_expr (fn, 0);
8053 expand_call (call_expr, NULL_RTX, false);
8054 }
8055
8056 emit_barrier ();
8057 }
8058
8059 /* Expand a call to __builtin_unreachable. We do nothing except emit
8060 a barrier saying that control flow will not pass here.
8061
8062 It is the responsibility of the program being compiled to ensure
8063 that control flow never reaches __builtin_unreachable. */
8064 static void
8065 expand_builtin_unreachable (void)
8066 {
8067 emit_barrier ();
8068 }
8069
8070 /* Expand EXP, a call to fabs, fabsf or fabsl.
8071 Return NULL_RTX if a normal call should be emitted rather than expanding
8072 the function inline. If convenient, the result should be placed
8073 in TARGET. SUBTARGET may be used as the target for computing
8074 the operand. */
8075
8076 static rtx
8077 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
8078 {
8079 machine_mode mode;
8080 tree arg;
8081 rtx op0;
8082
8083 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8084 return NULL_RTX;
8085
8086 arg = CALL_EXPR_ARG (exp, 0);
8087 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
8088 mode = TYPE_MODE (TREE_TYPE (arg));
8089 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
8090 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
8091 }
8092
8093 /* Expand EXP, a call to copysign, copysignf, or copysignl.
8094 Return NULL if a normal call should be emitted rather than expanding the
8095 function inline. If convenient, the result should be placed in TARGET.
8096 SUBTARGET may be used as the target for computing the operand. */
8097
8098 static rtx
8099 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
8100 {
8101 rtx op0, op1;
8102 tree arg;
8103
8104 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
8105 return NULL_RTX;
8106
8107 arg = CALL_EXPR_ARG (exp, 0);
8108 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
8109
8110 arg = CALL_EXPR_ARG (exp, 1);
8111 op1 = expand_normal (arg);
8112
8113 return expand_copysign (op0, op1, target);
8114 }
8115
8116 /* Emit a call to __builtin___clear_cache. */
8117
8118 void
8119 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
8120 {
8121 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
8122 BUILTIN_ASM_NAME_PTR
8123 (BUILT_IN_CLEAR_CACHE));
8124
8125 emit_library_call (callee,
8126 LCT_NORMAL, VOIDmode,
8127 convert_memory_address (ptr_mode, begin), ptr_mode,
8128 convert_memory_address (ptr_mode, end), ptr_mode);
8129 }
8130
8131 /* Emit a call to __builtin___clear_cache, unless the target specifies
8132 it as do-nothing. This function can be used by trampoline
8133 finalizers to duplicate the effects of expanding a call to the
8134 clear_cache builtin. */
8135
8136 void
8137 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
8138 {
8139 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
8140 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
8141 {
8142 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
8143 return;
8144 }
8145
8146 if (targetm.have_clear_cache ())
8147 {
8148 /* We have a "clear_cache" insn, and it will handle everything. */
8149 class expand_operand ops[2];
8150
8151 create_address_operand (&ops[0], begin);
8152 create_address_operand (&ops[1], end);
8153
8154 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8155 return;
8156 }
8157 else
8158 {
8159 #ifndef CLEAR_INSN_CACHE
8160 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
8161 does nothing, so there is no need to call it. */
8162 return;
8163 #endif /* CLEAR_INSN_CACHE */
8164 }
8165
8166 targetm.calls.emit_call_builtin___clear_cache (begin, end);
8167 }
8168
8169 /* Expand a call to __builtin___clear_cache. */
8170
8171 static void
8172 expand_builtin___clear_cache (tree exp)
8173 {
8174 tree begin, end;
8175 rtx begin_rtx, end_rtx;
8176
8177 /* We must not expand to a library call. If we did, any
8178 fallback library function in libgcc that might contain a call to
8179 __builtin___clear_cache() would recurse infinitely. */
8180 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8181 {
8182 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
8183 return;
8184 }
8185
8186 begin = CALL_EXPR_ARG (exp, 0);
8187 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
8188
8189 end = CALL_EXPR_ARG (exp, 1);
8190 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
8191
8192 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
8193 }
8194
8195 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
8196
8197 static rtx
8198 round_trampoline_addr (rtx tramp)
8199 {
8200 rtx temp, addend, mask;
8201
8202 /* If we don't need too much alignment, we'll have been guaranteed
8203 proper alignment by get_trampoline_type. */
8204 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
8205 return tramp;
8206
8207 /* Round address up to desired boundary. */
8208 temp = gen_reg_rtx (Pmode);
8209 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
8210 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
8211
8212 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
8213 temp, 0, OPTAB_LIB_WIDEN);
8214 tramp = expand_simple_binop (Pmode, AND, temp, mask,
8215 temp, 0, OPTAB_LIB_WIDEN);
8216
8217 return tramp;
8218 }
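
/* The rounding above is the usual (addr + align - 1) & -align idiom in
   byte units, used when TRAMPOLINE_ALIGNMENT exceeds STACK_BOUNDARY.
   For instance, with a TRAMPOLINE_ALIGNMENT of 64 bits the addend is 7
   and the mask is -8, so an address of 0x1003 is rounded up to
   0x1008.  */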
8219
8220 static rtx
8221 expand_builtin_init_trampoline (tree exp, bool onstack)
8222 {
8223 tree t_tramp, t_func, t_chain;
8224 rtx m_tramp, r_tramp, r_chain, tmp;
8225
8226 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
8227 POINTER_TYPE, VOID_TYPE))
8228 return NULL_RTX;
8229
8230 t_tramp = CALL_EXPR_ARG (exp, 0);
8231 t_func = CALL_EXPR_ARG (exp, 1);
8232 t_chain = CALL_EXPR_ARG (exp, 2);
8233
8234 r_tramp = expand_normal (t_tramp);
8235 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
8236 MEM_NOTRAP_P (m_tramp) = 1;
8237
8238 /* If ONSTACK, the TRAMP argument should be the address of a field
8239 within the local function's FRAME decl. Either way, let's see if
8240 we can fill in the MEM_ATTRs for this memory. */
8241 if (TREE_CODE (t_tramp) == ADDR_EXPR)
8242 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
8243
8244 /* Creator of a heap trampoline is responsible for making sure the
8245 address is aligned to at least STACK_BOUNDARY. Normally malloc
8246 will ensure this anyhow. */
8247 tmp = round_trampoline_addr (r_tramp);
8248 if (tmp != r_tramp)
8249 {
8250 m_tramp = change_address (m_tramp, BLKmode, tmp);
8251 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
8252 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
8253 }
8254
8255 /* The FUNC argument should be the address of the nested function.
8256 Extract the actual function decl to pass to the hook. */
8257 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
8258 t_func = TREE_OPERAND (t_func, 0);
8259 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
8260
8261 r_chain = expand_normal (t_chain);
8262
8263 /* Generate insns to initialize the trampoline. */
8264 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
8265
8266 if (onstack)
8267 {
8268 trampolines_created = 1;
8269
8270 if (targetm.calls.custom_function_descriptors != 0)
8271 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
8272 "trampoline generated for nested function %qD", t_func);
8273 }
8274
8275 return const0_rtx;
8276 }
8277
8278 static rtx
8279 expand_builtin_adjust_trampoline (tree exp)
8280 {
8281 rtx tramp;
8282
8283 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8284 return NULL_RTX;
8285
8286 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
8287 tramp = round_trampoline_addr (tramp);
8288 if (targetm.calls.trampoline_adjust_address)
8289 tramp = targetm.calls.trampoline_adjust_address (tramp);
8290
8291 return tramp;
8292 }
8293
8294 /* Expand a call to the builtin descriptor initialization routine.
8295 A descriptor is made up of a pair of pointers, to the static
8296 chain and to the code entry, in that order. */
8297
8298 static rtx
8299 expand_builtin_init_descriptor (tree exp)
8300 {
8301 tree t_descr, t_func, t_chain;
8302 rtx m_descr, r_descr, r_func, r_chain;
8303
8304 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
8305 VOID_TYPE))
8306 return NULL_RTX;
8307
8308 t_descr = CALL_EXPR_ARG (exp, 0);
8309 t_func = CALL_EXPR_ARG (exp, 1);
8310 t_chain = CALL_EXPR_ARG (exp, 2);
8311
8312 r_descr = expand_normal (t_descr);
8313 m_descr = gen_rtx_MEM (BLKmode, r_descr);
8314 MEM_NOTRAP_P (m_descr) = 1;
8315 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
8316
8317 r_func = expand_normal (t_func);
8318 r_chain = expand_normal (t_chain);
8319
8320 /* Generate insns to initialize the descriptor. */
8321 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
8322 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
8323 POINTER_SIZE / BITS_PER_UNIT), r_func);
8324
8325 return const0_rtx;
8326 }
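
/* The resulting in-memory layout is simply two consecutive pointers,
   static chain first:

     descr[0] : static chain value
     descr[1] : entry point of the target function

   matching the stores at offsets 0 and POINTER_SIZE / BITS_PER_UNIT
   above.  */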
8327
8328 /* Expand a call to the builtin descriptor adjustment routine. */
8329
8330 static rtx
8331 expand_builtin_adjust_descriptor (tree exp)
8332 {
8333 rtx tramp;
8334
8335 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8336 return NULL_RTX;
8337
8338 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
8339
8340 /* Unalign the descriptor to allow runtime identification. */
8341 tramp = plus_constant (ptr_mode, tramp,
8342 targetm.calls.custom_function_descriptors);
8343
8344 return force_operand (tramp, NULL_RTX);
8345 }
8346
8347 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
8348 function. The function first checks whether the back end provides
8349 an insn to implement signbit for the respective mode. If not, it
8350 checks whether the floating point format of the value is such that
8351 the sign bit can be extracted. If that is not the case, error out.
8352 EXP is the expression that is a call to the builtin function; if
8353 convenient, the result should be placed in TARGET. */
8354 static rtx
8355 expand_builtin_signbit (tree exp, rtx target)
8356 {
8357 const struct real_format *fmt;
8358 scalar_float_mode fmode;
8359 scalar_int_mode rmode, imode;
8360 tree arg;
8361 int word, bitpos;
8362 enum insn_code icode;
8363 rtx temp;
8364 location_t loc = EXPR_LOCATION (exp);
8365
8366 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8367 return NULL_RTX;
8368
8369 arg = CALL_EXPR_ARG (exp, 0);
8370 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
8371 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
8372 fmt = REAL_MODE_FORMAT (fmode);
8373
8374 arg = builtin_save_expr (arg);
8375
8376 /* Expand the argument yielding a RTX expression. */
8377 temp = expand_normal (arg);
8378
8379 /* Check if the back end provides an insn that handles signbit for the
8380 argument's mode. */
8381 icode = optab_handler (signbit_optab, fmode);
8382 if (icode != CODE_FOR_nothing)
8383 {
8384 rtx_insn *last = get_last_insn ();
8385 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8386 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
8387 return target;
8388 delete_insns_since (last);
8389 }
8390
8391 /* For floating point formats without a sign bit, implement signbit
8392 as "ARG < 0.0". */
8393 bitpos = fmt->signbit_ro;
8394 if (bitpos < 0)
8395 {
8396 /* But we can't do this if the format supports signed zero. */
8397 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
8398
8399 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
8400 build_real (TREE_TYPE (arg), dconst0));
8401 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8402 }
8403
8404 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
8405 {
8406 imode = int_mode_for_mode (fmode).require ();
8407 temp = gen_lowpart (imode, temp);
8408 }
8409 else
8410 {
8411 imode = word_mode;
8412 /* Handle targets with different FP word orders. */
8413 if (FLOAT_WORDS_BIG_ENDIAN)
8414 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
8415 else
8416 word = bitpos / BITS_PER_WORD;
8417 temp = operand_subword_force (temp, word, fmode);
8418 bitpos = bitpos % BITS_PER_WORD;
8419 }
8420
8421 /* Force the intermediate word_mode (or narrower) result into a
8422 register. This avoids attempting to create paradoxical SUBREGs
8423 of floating point modes below. */
8424 temp = force_reg (imode, temp);
8425
8426 /* If the bitpos is within the "result mode" lowpart, the operation
8427 can be implemented with a single bitwise AND. Otherwise, we need
8428 a right shift and an AND. */
8429
8430 if (bitpos < GET_MODE_BITSIZE (rmode))
8431 {
8432 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
8433
8434 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
8435 temp = gen_lowpart (rmode, temp);
8436 temp = expand_binop (rmode, and_optab, temp,
8437 immed_wide_int_const (mask, rmode),
8438 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8439 }
8440 else
8441 {
8442 /* Perform a logical right shift to place the signbit in the least
8443 significant bit, then truncate the result to the desired mode
8444 and mask just this bit. */
8445 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
8446 temp = gen_lowpart (rmode, temp);
8447 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
8448 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8449 }
8450
8451 return temp;
8452 }
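
/* As a concrete example, assuming IEEE double with the sign in bit 63
   and a 32-bit int result: on a 64-bit-word target the argument is
   viewed in DImode, shifted right by 63 and masked with 1; on a
   32-bit-word little-endian target the high word is selected
   (bitpos 63 -> word 1, bit 31) and masked with 0x80000000.  This is
   one reason signbit is only guaranteed to return zero or nonzero
   rather than exactly 0 or 1.  */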
8453
8454 /* Expand fork or exec calls. TARGET is the desired target of the
8455 call. EXP is the call. FN is the
8456 identifier of the actual function. IGNORE is nonzero if the
8457 value is to be ignored. */
8458
8459 static rtx
8460 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
8461 {
8462 tree id, decl;
8463 tree call;
8464
8465 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8466 {
8467 tree path = CALL_EXPR_ARG (exp, 0);
8468 /* Detect unterminated path. */
8469 if (!check_read_access (exp, path))
8470 return NULL_RTX;
8471
8472 /* Also detect unterminated first argument. */
8473 switch (DECL_FUNCTION_CODE (fn))
8474 {
8475 case BUILT_IN_EXECL:
8476 case BUILT_IN_EXECLE:
8477 case BUILT_IN_EXECLP:
8478 if (!check_read_access (exp, path))
8479 return NULL_RTX;
8480 default:
8481 break;
8482 }
8483 }
8484
8485
8486 /* If we are not profiling, just call the function. */
8487 if (!profile_arc_flag)
8488 return NULL_RTX;
8489
8490 /* Otherwise call the wrapper. This should be equivalent for the rest of
8491 the compiler, so the code does not diverge, and the wrapper may run the
8492 code necessary for keeping the profiling sane. */
8493
8494 switch (DECL_FUNCTION_CODE (fn))
8495 {
8496 case BUILT_IN_FORK:
8497 id = get_identifier ("__gcov_fork");
8498 break;
8499
8500 case BUILT_IN_EXECL:
8501 id = get_identifier ("__gcov_execl");
8502 break;
8503
8504 case BUILT_IN_EXECV:
8505 id = get_identifier ("__gcov_execv");
8506 break;
8507
8508 case BUILT_IN_EXECLP:
8509 id = get_identifier ("__gcov_execlp");
8510 break;
8511
8512 case BUILT_IN_EXECLE:
8513 id = get_identifier ("__gcov_execle");
8514 break;
8515
8516 case BUILT_IN_EXECVP:
8517 id = get_identifier ("__gcov_execvp");
8518 break;
8519
8520 case BUILT_IN_EXECVE:
8521 id = get_identifier ("__gcov_execve");
8522 break;
8523
8524 default:
8525 gcc_unreachable ();
8526 }
8527
8528 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8529 FUNCTION_DECL, id, TREE_TYPE (fn));
8530 DECL_EXTERNAL (decl) = 1;
8531 TREE_PUBLIC (decl) = 1;
8532 DECL_ARTIFICIAL (decl) = 1;
8533 TREE_NOTHROW (decl) = 1;
8534 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8535 DECL_VISIBILITY_SPECIFIED (decl) = 1;
8536 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
8537 return expand_call (call, target, ignore);
8538 }
8539
8540
8541 \f
8542 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
8543 the pointer in these functions is void*, the tree optimizers may remove
8544 casts. The mode computed in expand_builtin isn't reliable either, due
8545 to __sync_bool_compare_and_swap.
8546
8547 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
8548 group of builtins. This gives us log2 of the mode size. */
8549
8550 static inline machine_mode
8551 get_builtin_sync_mode (int fcode_diff)
8552 {
8553 /* The size is not negotiable, so ask not to get BLKmode in return
8554 if the target indicates that a smaller size would be better. */
8555 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
8556 }
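
/* For instance, expanding __sync_fetch_and_add_4 passes
   fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 = 2, so the mode is the
   integer mode of BITS_PER_UNIT << 2 = 32 bits, i.e. SImode on
   typical targets.  */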
8557
8558 /* Expand the memory expression LOC and return the appropriate memory operand
8559 for the builtin_sync operations. */
8560
8561 static rtx
8562 get_builtin_sync_mem (tree loc, machine_mode mode)
8563 {
8564 rtx addr, mem;
8565 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8566 ? TREE_TYPE (TREE_TYPE (loc))
8567 : TREE_TYPE (loc));
8568 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
8569
8570 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
8571 addr = convert_memory_address (addr_mode, addr);
8572
8573 /* Note that we explicitly do not want any alias information for this
8574 memory, so that we kill all other live memories. Otherwise we don't
8575 satisfy the full barrier semantics of the intrinsic. */
8576 mem = gen_rtx_MEM (mode, addr);
8577
8578 set_mem_addr_space (mem, addr_space);
8579
8580 mem = validize_mem (mem);
8581
8582 /* The alignment needs to be at least that of the mode. */
8583 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
8584 get_pointer_alignment (loc)));
8585 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
8586 MEM_VOLATILE_P (mem) = 1;
8587
8588 return mem;
8589 }
8590
8591 /* Make sure an argument is in the right mode.
8592 EXP is the tree argument.
8593 MODE is the mode it should be in. */
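/* For example, if MODE is QImode and EXP is an int SSA name defined as a
   widening cast of a char value, the cast is looked through below and the
   original narrow value is expanded directly instead of truncating the
   promoted int back to QImode afterwards.  */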
8594
8595 static rtx
8596 expand_expr_force_mode (tree exp, machine_mode mode)
8597 {
8598 rtx val;
8599 machine_mode old_mode;
8600
8601 if (TREE_CODE (exp) == SSA_NAME
8602 && TYPE_MODE (TREE_TYPE (exp)) != mode)
8603 {
8604 /* Undo argument promotion if possible, as combine might not
8605 be able to do it later due to MEM_VOLATILE_P uses in the
8606 patterns. */
8607 gimple *g = get_gimple_for_ssa_name (exp);
8608 if (g && gimple_assign_cast_p (g))
8609 {
8610 tree rhs = gimple_assign_rhs1 (g);
8611 tree_code code = gimple_assign_rhs_code (g);
8612 if (CONVERT_EXPR_CODE_P (code)
8613 && TYPE_MODE (TREE_TYPE (rhs)) == mode
8614 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
8615 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
8616 && (TYPE_PRECISION (TREE_TYPE (exp))
8617 > TYPE_PRECISION (TREE_TYPE (rhs))))
8618 exp = rhs;
8619 }
8620 }
8621
8622 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
8623 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
8624 of CONST_INTs, where we know the old_mode only from the call argument. */
8625
8626 old_mode = GET_MODE (val);
8627 if (old_mode == VOIDmode)
8628 old_mode = TYPE_MODE (TREE_TYPE (exp));
8629 val = convert_modes (mode, old_mode, val, 1);
8630 return val;
8631 }
8632
8633
8634 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
8635 EXP is the CALL_EXPR. CODE is the rtx code
8636 that corresponds to the arithmetic or logical operation from the name;
8637 an exception here is that NOT actually means NAND. TARGET is an optional
8638 place for us to store the results; AFTER is true if this is the
8639 fetch_and_xxx form. */
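/* For example, __sync_fetch_and_nand (p, v) arrives here with CODE == NOT
   and, since GCC 4.4, is implemented as

     tmp = *p; *p = ~(tmp & v); return tmp;

   which is what the -Wsync-nand note below is about.  */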
8640
8641 static rtx
8642 expand_builtin_sync_operation (machine_mode mode, tree exp,
8643 enum rtx_code code, bool after,
8644 rtx target)
8645 {
8646 rtx val, mem;
8647 location_t loc = EXPR_LOCATION (exp);
8648
8649 if (code == NOT && warn_sync_nand)
8650 {
8651 tree fndecl = get_callee_fndecl (exp);
8652 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8653
8654 static bool warned_f_a_n, warned_n_a_f;
8655
8656 switch (fcode)
8657 {
8658 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8659 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8660 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8661 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8662 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8663 if (warned_f_a_n)
8664 break;
8665
8666 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
8667 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8668 warned_f_a_n = true;
8669 break;
8670
8671 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8672 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8673 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8674 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8675 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8676 if (warned_n_a_f)
8677 break;
8678
8679 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
8680 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8681 warned_n_a_f = true;
8682 break;
8683
8684 default:
8685 gcc_unreachable ();
8686 }
8687 }
8688
8689 /* Expand the operands. */
8690 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8691 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8692
8693 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
8694 after);
8695 }
8696
8697 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
8698 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
8699 true if this is the boolean form. TARGET is a place for us to store the
8700 results; this is NOT optional if IS_BOOL is true. */
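/* For example, __sync_bool_compare_and_swap (p, o, n) sets IS_BOOL and
   receives the success flag through PBOOL, while
   __sync_val_compare_and_swap (p, o, n) receives the previous value
   through POVAL; both forms map onto the same
   expand_atomic_compare_and_swap call below.  */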
8701
8702 static rtx
8703 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
8704 bool is_bool, rtx target)
8705 {
8706 rtx old_val, new_val, mem;
8707 rtx *pbool, *poval;
8708
8709 /* Expand the operands. */
8710 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8711 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8712 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8713
8714 pbool = poval = NULL;
8715 if (target != const0_rtx)
8716 {
8717 if (is_bool)
8718 pbool = &target;
8719 else
8720 poval = &target;
8721 }
8722 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
8723 false, MEMMODEL_SYNC_SEQ_CST,
8724 MEMMODEL_SYNC_SEQ_CST))
8725 return NULL_RTX;
8726
8727 return target;
8728 }
8729
8730 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8731 general form is actually an atomic exchange, and some targets only
8732 support a reduced form with the second argument being a constant 1.
8733 EXP is the CALL_EXPR; TARGET is an optional place for us to store
8734 the results. */
8735
8736 static rtx
8737 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
8738 rtx target)
8739 {
8740 rtx val, mem;
8741
8742 /* Expand the operands. */
8743 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8744 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8745
8746 return expand_sync_lock_test_and_set (target, mem, val);
8747 }
8748
8749 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8750
8751 static void
8752 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
8753 {
8754 rtx mem;
8755
8756 /* Expand the operands. */
8757 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8758
8759 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
8760 }
8761
8762 /* Given an integer representing an ``enum memmodel'', verify its
8763 correctness and return the memory model enum. */
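/* For example, __atomic_load_n (p, __ATOMIC_ACQUIRE) passes the constant
   __ATOMIC_ACQUIRE and yields MEMMODEL_ACQUIRE, while a model held in an
   ordinary variable is not an INTEGER_CST and is conservatively treated
   as MEMMODEL_SEQ_CST below.  */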
8764
8765 static enum memmodel
8766 get_memmodel (tree exp)
8767 {
8768 rtx op;
8769 unsigned HOST_WIDE_INT val;
8770 location_t loc
8771 = expansion_point_location_if_in_system_header (input_location);
8772
8773 /* If the parameter is not a constant, it's a run time value so we'll just
8774 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
8775 if (TREE_CODE (exp) != INTEGER_CST)
8776 return MEMMODEL_SEQ_CST;
8777
8778 op = expand_normal (exp);
8779
8780 val = INTVAL (op);
8781 if (targetm.memmodel_check)
8782 val = targetm.memmodel_check (val);
8783 else if (val & ~MEMMODEL_MASK)
8784 {
8785 warning_at (loc, OPT_Winvalid_memory_model,
8786 "unknown architecture specifier in memory model to builtin");
8787 return MEMMODEL_SEQ_CST;
8788 }
8789
8790 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
8791 if (memmodel_base (val) >= MEMMODEL_LAST)
8792 {
8793 warning_at (loc, OPT_Winvalid_memory_model,
8794 "invalid memory model argument to builtin");
8795 return MEMMODEL_SEQ_CST;
8796 }
8797
8798 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
8799 be conservative and promote consume to acquire. */
8800 if (val == MEMMODEL_CONSUME)
8801 val = MEMMODEL_ACQUIRE;
8802
8803 return (enum memmodel) val;
8804 }
8805
8806 /* Expand the __atomic_exchange intrinsic:
8807 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8808 EXP is the CALL_EXPR.
8809 TARGET is an optional place for us to store the results. */
8810
8811 static rtx
8812 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
8813 {
8814 rtx val, mem;
8815 enum memmodel model;
8816
8817 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8818
8819 if (!flag_inline_atomics)
8820 return NULL_RTX;
8821
8822 /* Expand the operands. */
8823 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8824 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8825
8826 return expand_atomic_exchange (target, mem, val, model);
8827 }
8828
8829 /* Expand the __atomic_compare_exchange intrinsic:
8830 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
8831 TYPE desired, BOOL weak,
8832 enum memmodel success,
8833 enum memmodel failure)
8834 EXP is the CALL_EXPR.
8835 TARGET is an optional place for us to store the results. */
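/* For example, a call such as

     __atomic_compare_exchange_n (p, &expected, desired, 0,
				  __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE)

   reaches this point with SUCCESS == MEMMODEL_ACQ_REL and
   FAILURE == MEMMODEL_ACQUIRE.  A failure model stronger than the
   success model, or one using RELEASE or ACQ_REL, is diagnosed with
   -Winvalid-memory-model just below.  (Illustrative user call.)  */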
8836
8837 static rtx
8838 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
8839 rtx target)
8840 {
8841 rtx expect, desired, mem, oldval;
8842 rtx_code_label *label;
8843 enum memmodel success, failure;
8844 tree weak;
8845 bool is_weak;
8846 location_t loc
8847 = expansion_point_location_if_in_system_header (input_location);
8848
8849 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
8850 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
8851
8852 if (failure > success)
8853 {
8854 warning_at (loc, OPT_Winvalid_memory_model,
8855 "failure memory model cannot be stronger than success "
8856 "memory model for %<__atomic_compare_exchange%>");
8857 success = MEMMODEL_SEQ_CST;
8858 }
8859
8860 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8861 {
8862 warning_at (loc, OPT_Winvalid_memory_model,
8863 "invalid failure memory model for "
8864 "%<__atomic_compare_exchange%>");
8865 failure = MEMMODEL_SEQ_CST;
8866 success = MEMMODEL_SEQ_CST;
8867 }
8868
8869
8870 if (!flag_inline_atomics)
8871 return NULL_RTX;
8872
8873 /* Expand the operands. */
8874 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8875
8876 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
8877 expect = convert_memory_address (Pmode, expect);
8878 expect = gen_rtx_MEM (mode, expect);
8879 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8880
8881 weak = CALL_EXPR_ARG (exp, 3);
8882 is_weak = false;
8883 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
8884 is_weak = true;
8885
8886 if (target == const0_rtx)
8887 target = NULL;
8888
8889 /* Lest the rtl backend create a race condition with an improper store
8890 to memory, always create a new pseudo for OLDVAL. */
8891 oldval = NULL;
8892
8893 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
8894 is_weak, success, failure))
8895 return NULL_RTX;
8896
8897 /* Conditionally store back to EXPECT, lest we create a race condition
8898 with an improper store to memory. */
8899 /* ??? With a rearrangement of atomics at the gimple level, we can handle
8900 the normal case where EXPECT is totally private, i.e. a register. At
8901 which point the store can be unconditional. */
8902 label = gen_label_rtx ();
8903 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
8904 GET_MODE (target), 1, label);
8905 emit_move_insn (expect, oldval);
8906 emit_label (label);
8907
8908 return target;
8909 }
8910
8911 /* Helper function for expand_ifn_atomic_compare_exchange - expand
8912 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
8913 call. The weak parameter must be dropped to match the expected parameter
8914 list and the expected argument changed from value to pointer to memory
8915 slot. */
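/* Roughly, an internal call

     lhs = .ATOMIC_COMPARE_EXCHANGE (obj, expected, desired, flags, succ, fail);

   becomes a call to __atomic_compare_exchange_N (obj, &tmp, desired,
   succ, fail), where TMP is a fresh stack slot initialized from the
   EXPECTED value and the size/weak FLAGS argument is dropped, since the
   library routine does not take it.  */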
8916
8917 static void
8918 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
8919 {
8920 unsigned int z;
8921 vec<tree, va_gc> *vec;
8922
8923 vec_alloc (vec, 5);
8924 vec->quick_push (gimple_call_arg (call, 0));
8925 tree expected = gimple_call_arg (call, 1);
8926 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
8927 TREE_TYPE (expected));
8928 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
8929 if (expd != x)
8930 emit_move_insn (x, expd);
8931 tree v = make_tree (TREE_TYPE (expected), x);
8932 vec->quick_push (build1 (ADDR_EXPR,
8933 build_pointer_type (TREE_TYPE (expected)), v));
8934 vec->quick_push (gimple_call_arg (call, 2));
8935 /* Skip the boolean weak parameter. */
8936 for (z = 4; z < 6; z++)
8937 vec->quick_push (gimple_call_arg (call, z));
8938 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
8939 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
8940 gcc_assert (bytes_log2 < 5);
8941 built_in_function fncode
8942 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
8943 + bytes_log2);
8944 tree fndecl = builtin_decl_explicit (fncode);
8945 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
8946 fndecl);
8947 tree exp = build_call_vec (boolean_type_node, fn, vec);
8948 tree lhs = gimple_call_lhs (call);
8949 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
8950 if (lhs)
8951 {
8952 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8953 if (GET_MODE (boolret) != mode)
8954 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8955 x = force_reg (mode, x);
8956 write_complex_part (target, boolret, true);
8957 write_complex_part (target, x, false);
8958 }
8959 }
8960
8961 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
8962
8963 void
8964 expand_ifn_atomic_compare_exchange (gcall *call)
8965 {
8966 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
8967 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
8968 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
8969 rtx expect, desired, mem, oldval, boolret;
8970 enum memmodel success, failure;
8971 tree lhs;
8972 bool is_weak;
8973 location_t loc
8974 = expansion_point_location_if_in_system_header (gimple_location (call));
8975
8976 success = get_memmodel (gimple_call_arg (call, 4));
8977 failure = get_memmodel (gimple_call_arg (call, 5));
8978
8979 if (failure > success)
8980 {
8981 warning_at (loc, OPT_Winvalid_memory_model,
8982 "failure memory model cannot be stronger than success "
8983 "memory model for %<__atomic_compare_exchange%>");
8984 success = MEMMODEL_SEQ_CST;
8985 }
8986
8987 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8988 {
8989 warning_at (loc, OPT_Winvalid_memory_model,
8990 "invalid failure memory model for "
8991 "%<__atomic_compare_exchange%>");
8992 failure = MEMMODEL_SEQ_CST;
8993 success = MEMMODEL_SEQ_CST;
8994 }
8995
8996 if (!flag_inline_atomics)
8997 {
8998 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8999 return;
9000 }
9001
9002 /* Expand the operands. */
9003 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
9004
9005 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
9006 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
9007
9008 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
9009
9010 boolret = NULL;
9011 oldval = NULL;
9012
9013 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
9014 is_weak, success, failure))
9015 {
9016 expand_ifn_atomic_compare_exchange_into_call (call, mode);
9017 return;
9018 }
9019
9020 lhs = gimple_call_lhs (call);
9021 if (lhs)
9022 {
9023 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
9024 if (GET_MODE (boolret) != mode)
9025 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
9026 write_complex_part (target, boolret, true);
9027 write_complex_part (target, oldval, false);
9028 }
9029 }
9030
9031 /* Expand the __atomic_load intrinsic:
9032 TYPE __atomic_load (TYPE *object, enum memmodel)
9033 EXP is the CALL_EXPR.
9034 TARGET is an optional place for us to store the results. */
9035
9036 static rtx
9037 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
9038 {
9039 rtx mem;
9040 enum memmodel model;
9041
9042 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9043 if (is_mm_release (model) || is_mm_acq_rel (model))
9044 {
9045 location_t loc
9046 = expansion_point_location_if_in_system_header (input_location);
9047 warning_at (loc, OPT_Winvalid_memory_model,
9048 "invalid memory model for %<__atomic_load%>");
9049 model = MEMMODEL_SEQ_CST;
9050 }
9051
9052 if (!flag_inline_atomics)
9053 return NULL_RTX;
9054
9055 /* Expand the operand. */
9056 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9057
9058 return expand_atomic_load (target, mem, model);
9059 }
9060
9061
9062 /* Expand the __atomic_store intrinsic:
9063 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
9064 EXP is the CALL_EXPR.
9065 TARGET is an optional place for us to store the results. */
9066
9067 static rtx
9068 expand_builtin_atomic_store (machine_mode mode, tree exp)
9069 {
9070 rtx mem, val;
9071 enum memmodel model;
9072
9073 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
9074 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
9075 || is_mm_release (model)))
9076 {
9077 location_t loc
9078 = expansion_point_location_if_in_system_header (input_location);
9079 warning_at (loc, OPT_Winvalid_memory_model,
9080 "invalid memory model for %<__atomic_store%>");
9081 model = MEMMODEL_SEQ_CST;
9082 }
9083
9084 if (!flag_inline_atomics)
9085 return NULL_RTX;
9086
9087 /* Expand the operands. */
9088 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9089 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
9090
9091 return expand_atomic_store (mem, val, model, false);
9092 }
9093
9094 /* Expand the __atomic_fetch_XXX intrinsic:
9095 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
9096 EXP is the CALL_EXPR.
9097 TARGET is an optional place for us to store the results.
9098 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
9099 FETCH_AFTER is true if returning the result of the operation.
9100 FETCH_AFTER is false if returning the value before the operation.
9101 IGNORE is true if the result is not used.
9102 EXT_CALL is the correct builtin for an external call if this cannot be
9103 resolved to an instruction sequence. */
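/* For example, if __atomic_add_fetch_4 cannot be expanded inline, the
   call is redirected below to the external __atomic_fetch_add_4 (the
   EXT_CALL) and the returned pre-operation value is corrected by adding
   VAL again; for NAND the correction is ret = ~(ret & val).  (Sketch of
   the usual pairing; the actual EXT_CALL is chosen by the caller.)  */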
9104
9105 static rtx
9106 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
9107 enum rtx_code code, bool fetch_after,
9108 bool ignore, enum built_in_function ext_call)
9109 {
9110 rtx val, mem, ret;
9111 enum memmodel model;
9112 tree fndecl;
9113 tree addr;
9114
9115 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
9116
9117 /* Expand the operands. */
9118 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9119 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
9120
9121 /* Only try generating instructions if inlining is turned on. */
9122 if (flag_inline_atomics)
9123 {
9124 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
9125 if (ret)
9126 return ret;
9127 }
9128
9129 /* Return if a different routine isn't needed for the library call. */
9130 if (ext_call == BUILT_IN_NONE)
9131 return NULL_RTX;
9132
9133 /* Change the call to the specified function. */
9134 fndecl = get_callee_fndecl (exp);
9135 addr = CALL_EXPR_FN (exp);
9136 STRIP_NOPS (addr);
9137
9138 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9139 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
9140
9141 /* If we will emit code after the call, the call cannot be a tail call.
9142 If it is emitted as a tail call, a barrier is emitted after it, and
9143 then all trailing code is removed. */
9144 if (!ignore)
9145 CALL_EXPR_TAILCALL (exp) = 0;
9146
9147 /* Expand the call here so we can emit trailing code. */
9148 ret = expand_call (exp, target, ignore);
9149
9150 /* Replace the original function just in case it matters. */
9151 TREE_OPERAND (addr, 0) = fndecl;
9152
9153 /* Then issue the arithmetic correction to return the right result. */
9154 if (!ignore)
9155 {
9156 if (code == NOT)
9157 {
9158 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
9159 OPTAB_LIB_WIDEN);
9160 ret = expand_simple_unop (mode, NOT, ret, target, true);
9161 }
9162 else
9163 ret = expand_simple_binop (mode, code, ret, val, target, true,
9164 OPTAB_LIB_WIDEN);
9165 }
9166 return ret;
9167 }
9168
9169 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
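/* These internal functions are created earlier in the pipeline from user
   code of roughly the form

     (__atomic_fetch_or (p, 1 << bit, model) >> bit) & 1

   and the matching __atomic_fetch_and/__atomic_fetch_xor variants, when
   only the tested bit of the result is used.  (Rough characterization of
   the matched pattern.)  */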
9170
9171 void
9172 expand_ifn_atomic_bit_test_and (gcall *call)
9173 {
9174 tree ptr = gimple_call_arg (call, 0);
9175 tree bit = gimple_call_arg (call, 1);
9176 tree flag = gimple_call_arg (call, 2);
9177 tree lhs = gimple_call_lhs (call);
9178 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
9179 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
9180 enum rtx_code code;
9181 optab optab;
9182 class expand_operand ops[5];
9183
9184 gcc_assert (flag_inline_atomics);
9185
9186 if (gimple_call_num_args (call) == 4)
9187 model = get_memmodel (gimple_call_arg (call, 3));
9188
9189 rtx mem = get_builtin_sync_mem (ptr, mode);
9190 rtx val = expand_expr_force_mode (bit, mode);
9191
9192 switch (gimple_call_internal_fn (call))
9193 {
9194 case IFN_ATOMIC_BIT_TEST_AND_SET:
9195 code = IOR;
9196 optab = atomic_bit_test_and_set_optab;
9197 break;
9198 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
9199 code = XOR;
9200 optab = atomic_bit_test_and_complement_optab;
9201 break;
9202 case IFN_ATOMIC_BIT_TEST_AND_RESET:
9203 code = AND;
9204 optab = atomic_bit_test_and_reset_optab;
9205 break;
9206 default:
9207 gcc_unreachable ();
9208 }
9209
9210 if (lhs == NULL_TREE)
9211 {
9212 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
9213 val, NULL_RTX, true, OPTAB_DIRECT);
9214 if (code == AND)
9215 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
9216 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
9217 return;
9218 }
9219
9220 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
9221 enum insn_code icode = direct_optab_handler (optab, mode);
9222 gcc_assert (icode != CODE_FOR_nothing);
9223 create_output_operand (&ops[0], target, mode);
9224 create_fixed_operand (&ops[1], mem);
9225 create_convert_operand_to (&ops[2], val, mode, true);
9226 create_integer_operand (&ops[3], model);
9227 create_integer_operand (&ops[4], integer_onep (flag));
9228 if (maybe_expand_insn (icode, 5, ops))
9229 return;
9230
9231 rtx bitval = val;
9232 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
9233 val, NULL_RTX, true, OPTAB_DIRECT);
9234 rtx maskval = val;
9235 if (code == AND)
9236 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
9237 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
9238 code, model, false);
9239 if (integer_onep (flag))
9240 {
9241 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
9242 NULL_RTX, true, OPTAB_DIRECT);
9243 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
9244 true, OPTAB_DIRECT);
9245 }
9246 else
9247 result = expand_simple_binop (mode, AND, result, maskval, target, true,
9248 OPTAB_DIRECT);
9249 if (result != target)
9250 emit_move_insn (target, result);
9251 }
9252
9253 /* Expand an atomic clear operation.
9254 void _atomic_clear (BOOL *obj, enum memmodel)
9255 EXP is the call expression. */
9256
9257 static rtx
9258 expand_builtin_atomic_clear (tree exp)
9259 {
9260 machine_mode mode;
9261 rtx mem, ret;
9262 enum memmodel model;
9263
9264 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
9265 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9266 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9267
9268 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
9269 {
9270 location_t loc
9271 = expansion_point_location_if_in_system_header (input_location);
9272 warning_at (loc, OPT_Winvalid_memory_model,
9273 "invalid memory model for %<__atomic_store%>");
9274 model = MEMMODEL_SEQ_CST;
9275 }
9276
9277 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
9278 Failing that, a store is issued by __atomic_store. The only way this can
9279 fail is if the bool type is larger than a word size. Unlikely, but
9280 handle it anyway for completeness. Assume a single threaded model since
9281 there is no atomic support in this case, and no barriers are required. */
9282 ret = expand_atomic_store (mem, const0_rtx, model, true);
9283 if (!ret)
9284 emit_move_insn (mem, const0_rtx);
9285 return const0_rtx;
9286 }
9287
9288 /* Expand an atomic test_and_set operation.
9289 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
9290 EXP is the call expression. */
9291
9292 static rtx
9293 expand_builtin_atomic_test_and_set (tree exp, rtx target)
9294 {
9295 rtx mem;
9296 enum memmodel model;
9297 machine_mode mode;
9298
9299 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
9300 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9301 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9302
9303 return expand_atomic_test_and_set (target, mem, model);
9304 }
9305
9306
9307 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
9308 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
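/* For example, __atomic_always_lock_free (4, 0) folds to true on targets
   that provide both a compare-and-swap and an atomic load pattern for the
   32-bit integer mode, and to false otherwise; a non-null second argument
   instead supplies the alignment of the actual object.  */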
9309
9310 static tree
9311 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
9312 {
9313 int size;
9314 machine_mode mode;
9315 unsigned int mode_align, type_align;
9316
9317 if (TREE_CODE (arg0) != INTEGER_CST)
9318 return NULL_TREE;
9319
9320 /* We need a corresponding integer mode for the access to be lock-free. */
9321 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
9322 if (!int_mode_for_size (size, 0).exists (&mode))
9323 return boolean_false_node;
9324
9325 mode_align = GET_MODE_ALIGNMENT (mode);
9326
9327 if (TREE_CODE (arg1) == INTEGER_CST)
9328 {
9329 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
9330
9331 /* Either this argument is null, or it's a fake pointer encoding
9332 the alignment of the object. */
9333 val = least_bit_hwi (val);
9334 val *= BITS_PER_UNIT;
9335
9336 if (val == 0 || mode_align < val)
9337 type_align = mode_align;
9338 else
9339 type_align = val;
9340 }
9341 else
9342 {
9343 tree ttype = TREE_TYPE (arg1);
9344
9345 /* This function is usually invoked and folded immediately by the front
9346 end before anything else has a chance to look at it. The pointer
9347 parameter at this point is usually cast to a void *, so check for that
9348 and look past the cast. */
9349 if (CONVERT_EXPR_P (arg1)
9350 && POINTER_TYPE_P (ttype)
9351 && VOID_TYPE_P (TREE_TYPE (ttype))
9352 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
9353 arg1 = TREE_OPERAND (arg1, 0);
9354
9355 ttype = TREE_TYPE (arg1);
9356 gcc_assert (POINTER_TYPE_P (ttype));
9357
9358 /* Get the underlying type of the object. */
9359 ttype = TREE_TYPE (ttype);
9360 type_align = TYPE_ALIGN (ttype);
9361 }
9362
9363 /* If the object has smaller alignment, the lock free routines cannot
9364 be used. */
9365 if (type_align < mode_align)
9366 return boolean_false_node;
9367
9368 /* Check if a compare_and_swap pattern exists for the mode which represents
9369 the required size. The pattern is not allowed to fail, so the existence
9370 of the pattern indicates support is present. Also require that an
9371 atomic load exists for the required size. */
9372 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
9373 return boolean_true_node;
9374 else
9375 return boolean_false_node;
9376 }
9377
9378 /* Return true if the parameters to call EXP represent an object which will
9379 always generate lock free instructions. The first argument represents the
9380 size of the object, and the second parameter is a pointer to the object
9381 itself. If NULL is passed for the object, then the result is based on
9382 typical alignment for an object of the specified size. Otherwise return
9383 false. */
9384
9385 static rtx
9386 expand_builtin_atomic_always_lock_free (tree exp)
9387 {
9388 tree size;
9389 tree arg0 = CALL_EXPR_ARG (exp, 0);
9390 tree arg1 = CALL_EXPR_ARG (exp, 1);
9391
9392 if (TREE_CODE (arg0) != INTEGER_CST)
9393 {
9394 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
9395 return const0_rtx;
9396 }
9397
9398 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
9399 if (size == boolean_true_node)
9400 return const1_rtx;
9401 return const0_rtx;
9402 }
9403
9404 /* Return one or zero if it can be determined that object ARG1 of size ARG0
9405 is lock free on this architecture. */
9406
9407 static tree
9408 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9409 {
9410 if (!flag_inline_atomics)
9411 return NULL_TREE;
9412
9413 /* If it isn't always lock free, don't generate a result. */
9414 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9415 return boolean_true_node;
9416
9417 return NULL_TREE;
9418 }
9419
9420 /* Return true if the parameters to call EXP represent an object which will
9421 always generate lock free instructions. The first argument represents the
9422 size of the object, and the second parameter is a pointer to the object
9423 itself. If NULL is passed for the object, then the result is based on
9424 typical alignment for an object of the specified size. Otherwise return
9425 NULL. */
9426
9427 static rtx
9428 expand_builtin_atomic_is_lock_free (tree exp)
9429 {
9430 tree size;
9431 tree arg0 = CALL_EXPR_ARG (exp, 0);
9432 tree arg1 = CALL_EXPR_ARG (exp, 1);
9433
9434 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9435 {
9436 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
9437 return NULL_RTX;
9438 }
9439
9440 if (!flag_inline_atomics)
9441 return NULL_RTX;
9442
9443 /* If the value is known at compile time, return the RTX for it. */
9444 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
9445 if (size == boolean_true_node)
9446 return const1_rtx;
9447
9448 return NULL_RTX;
9449 }
9450
9451 /* Expand the __atomic_thread_fence intrinsic:
9452 void __atomic_thread_fence (enum memmodel)
9453 EXP is the CALL_EXPR. */
9454
9455 static void
9456 expand_builtin_atomic_thread_fence (tree exp)
9457 {
9458 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9459 expand_mem_thread_fence (model);
9460 }
9461
9462 /* Expand the __atomic_signal_fence intrinsic:
9463 void __atomic_signal_fence (enum memmodel)
9464 EXP is the CALL_EXPR. */
9465
9466 static void
9467 expand_builtin_atomic_signal_fence (tree exp)
9468 {
9469 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9470 expand_mem_signal_fence (model);
9471 }
9472
9473 /* Expand the __sync_synchronize intrinsic. */
9474
9475 static void
9476 expand_builtin_sync_synchronize (void)
9477 {
9478 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
9479 }
9480
9481 static rtx
9482 expand_builtin_thread_pointer (tree exp, rtx target)
9483 {
9484 enum insn_code icode;
9485 if (!validate_arglist (exp, VOID_TYPE))
9486 return const0_rtx;
9487 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9488 if (icode != CODE_FOR_nothing)
9489 {
9490 class expand_operand op;
9491 /* If the target is not suitable, then create a new target. */
9492 if (target == NULL_RTX
9493 || !REG_P (target)
9494 || GET_MODE (target) != Pmode)
9495 target = gen_reg_rtx (Pmode);
9496 create_output_operand (&op, target, Pmode);
9497 expand_insn (icode, 1, &op);
9498 return target;
9499 }
9500 error ("%<__builtin_thread_pointer%> is not supported on this target");
9501 return const0_rtx;
9502 }
9503
9504 static void
9505 expand_builtin_set_thread_pointer (tree exp)
9506 {
9507 enum insn_code icode;
9508 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9509 return;
9510 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9511 if (icode != CODE_FOR_nothing)
9512 {
9513 class expand_operand op;
9514 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9515 Pmode, EXPAND_NORMAL);
9516 create_input_operand (&op, val, Pmode);
9517 expand_insn (icode, 1, &op);
9518 return;
9519 }
9520 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
9521 }
9522
9523 \f
9524 /* Emit code to restore the current value of stack. */
9525
9526 static void
9527 expand_stack_restore (tree var)
9528 {
9529 rtx_insn *prev;
9530 rtx sa = expand_normal (var);
9531
9532 sa = convert_memory_address (Pmode, sa);
9533
9534 prev = get_last_insn ();
9535 emit_stack_restore (SAVE_BLOCK, sa);
9536
9537 record_new_stack_level ();
9538
9539 fixup_args_size_notes (prev, get_last_insn (), 0);
9540 }
9541
9542 /* Emit code to save the current value of stack. */
9543
9544 static rtx
9545 expand_stack_save (void)
9546 {
9547 rtx ret = NULL_RTX;
9548
9549 emit_stack_save (SAVE_BLOCK, &ret);
9550 return ret;
9551 }
9552
9553 /* Emit code to get the openacc gang, worker or vector id or size. */
9554
9555 static rtx
9556 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9557 {
9558 const char *name;
9559 rtx fallback_retval;
9560 rtx_insn *(*gen_fn) (rtx, rtx);
9561 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9562 {
9563 case BUILT_IN_GOACC_PARLEVEL_ID:
9564 name = "__builtin_goacc_parlevel_id";
9565 fallback_retval = const0_rtx;
9566 gen_fn = targetm.gen_oacc_dim_pos;
9567 break;
9568 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9569 name = "__builtin_goacc_parlevel_size";
9570 fallback_retval = const1_rtx;
9571 gen_fn = targetm.gen_oacc_dim_size;
9572 break;
9573 default:
9574 gcc_unreachable ();
9575 }
9576
9577 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9578 {
9579 error ("%qs only supported in OpenACC code", name);
9580 return const0_rtx;
9581 }
9582
9583 tree arg = CALL_EXPR_ARG (exp, 0);
9584 if (TREE_CODE (arg) != INTEGER_CST)
9585 {
9586 error ("non-constant argument 0 to %qs", name);
9587 return const0_rtx;
9588 }
9589
9590 int dim = TREE_INT_CST_LOW (arg);
9591 switch (dim)
9592 {
9593 case GOMP_DIM_GANG:
9594 case GOMP_DIM_WORKER:
9595 case GOMP_DIM_VECTOR:
9596 break;
9597 default:
9598 error ("illegal argument 0 to %qs", name);
9599 return const0_rtx;
9600 }
9601
9602 if (ignore)
9603 return target;
9604
9605 if (target == NULL_RTX)
9606 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9607
9608 if (!targetm.have_oacc_dim_size ())
9609 {
9610 emit_move_insn (target, fallback_retval);
9611 return target;
9612 }
9613
9614 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9615 emit_insn (gen_fn (reg, GEN_INT (dim)));
9616 if (reg != target)
9617 emit_move_insn (target, reg);
9618
9619 return target;
9620 }
9621
9622 /* Expand a string compare operation using a sequence of char comparisons
9623 to get rid of the calling overhead, with result going to TARGET if
9624 that's convenient.
9625
9626 VAR_STR is the variable string source;
9627 CONST_STR is the constant string source;
9628 LENGTH is the number of chars to compare;
9629 CONST_STR_N indicates which source string is the constant string;
9630 IS_MEMCMP indicates whether it's a memcmp or strcmp.
9631
9632 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
9633
9634 target = (int) (unsigned char) var_str[0]
9635 - (int) (unsigned char) const_str[0];
9636 if (target != 0)
9637 goto ne_label;
9638 ...
9639 target = (int) (unsigned char) var_str[length - 2]
9640 - (int) (unsigned char) const_str[length - 2];
9641 if (target != 0)
9642 goto ne_label;
9643 target = (int) (unsigned char) var_str[length - 1]
9644 - (int) (unsigned char) const_str[length - 1];
9645 ne_label:
9646 */
9647
9648 static rtx
9649 inline_string_cmp (rtx target, tree var_str, const char *const_str,
9650 unsigned HOST_WIDE_INT length,
9651 int const_str_n, machine_mode mode)
9652 {
9653 HOST_WIDE_INT offset = 0;
9654 rtx var_rtx_array
9655 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
9656 rtx var_rtx = NULL_RTX;
9657 rtx const_rtx = NULL_RTX;
9658 rtx result = target ? target : gen_reg_rtx (mode);
9659 rtx_code_label *ne_label = gen_label_rtx ();
9660 tree unit_type_node = unsigned_char_type_node;
9661 scalar_int_mode unit_mode
9662 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
9663
9664 start_sequence ();
9665
9666 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
9667 {
9668 var_rtx
9669 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
9670 const_rtx = c_readstr (const_str + offset, unit_mode);
9671 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
9672 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
9673
9674 op0 = convert_modes (mode, unit_mode, op0, 1);
9675 op1 = convert_modes (mode, unit_mode, op1, 1);
9676 result = expand_simple_binop (mode, MINUS, op0, op1,
9677 result, 1, OPTAB_WIDEN);
9678 if (i < length - 1)
9679 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
9680 mode, true, ne_label);
9681 offset += GET_MODE_SIZE (unit_mode);
9682 }
9683
9684 emit_label (ne_label);
9685 rtx_insn *insns = get_insns ();
9686 end_sequence ();
9687 emit_insn (insns);
9688
9689 return result;
9690 }
9691
9692 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
9693 to TARGET if that's convenient.
9694 If the call cannot be inlined, return NULL_RTX. */
9695
9696 static rtx
9697 inline_expand_builtin_bytecmp (tree exp, rtx target)
9698 {
9699 tree fndecl = get_callee_fndecl (exp);
9700 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9701 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9702
9703 /* Do NOT apply this inlining expansion when optimizing for size or when
9704 the optimization level is below 2. */
9705 if (optimize < 2 || optimize_insn_for_size_p ())
9706 return NULL_RTX;
9707
9708 gcc_checking_assert (fcode == BUILT_IN_STRCMP
9709 || fcode == BUILT_IN_STRNCMP
9710 || fcode == BUILT_IN_MEMCMP);
9711
9712 /* On a target where the type of the call (int) has the same or narrower
9713 precision than unsigned char, give up the inlining expansion. */
9714 if (TYPE_PRECISION (unsigned_char_type_node)
9715 >= TYPE_PRECISION (TREE_TYPE (exp)))
9716 return NULL_RTX;
9717
9718 tree arg1 = CALL_EXPR_ARG (exp, 0);
9719 tree arg2 = CALL_EXPR_ARG (exp, 1);
9720 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9721
9722 unsigned HOST_WIDE_INT len1 = 0;
9723 unsigned HOST_WIDE_INT len2 = 0;
9724 unsigned HOST_WIDE_INT len3 = 0;
9725
9726 /* Get the object representation of the initializers of ARG1 and ARG2
9727 as strings, provided they refer to constant objects, with their byte
9728 sizes in LEN1 and LEN2, respectively. */
9729 const char *bytes1 = getbyterep (arg1, &len1);
9730 const char *bytes2 = getbyterep (arg2, &len2);
9731
9732 /* Fail if neither argument refers to an initialized constant. */
9733 if (!bytes1 && !bytes2)
9734 return NULL_RTX;
9735
9736 if (is_ncmp)
9737 {
9738 /* Fail if the memcmp/strncmp bound is not a constant. */
9739 if (!tree_fits_uhwi_p (len3_tree))
9740 return NULL_RTX;
9741
9742 len3 = tree_to_uhwi (len3_tree);
9743
9744 if (fcode == BUILT_IN_MEMCMP)
9745 {
9746 /* Fail if the memcmp bound is greater than the size of either
9747 of the two constant objects. */
9748 if ((bytes1 && len1 < len3)
9749 || (bytes2 && len2 < len3))
9750 return NULL_RTX;
9751 }
9752 }
9753
9754 if (fcode != BUILT_IN_MEMCMP)
9755 {
9756 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9757 and LEN2 to the length of the nul-terminated string stored
9758 in each. */
9759 if (bytes1 != NULL)
9760 len1 = strnlen (bytes1, len1) + 1;
9761 if (bytes2 != NULL)
9762 len2 = strnlen (bytes2, len2) + 1;
9763 }
9764
9765 /* See inline_string_cmp. */
9766 int const_str_n;
9767 if (!len1)
9768 const_str_n = 2;
9769 else if (!len2)
9770 const_str_n = 1;
9771 else if (len2 > len1)
9772 const_str_n = 1;
9773 else
9774 const_str_n = 2;
9775
9776 /* For strncmp only, compute the new bound as the smallest of
9777 the lengths of the two strings (plus 1) and the bound provided
9778 to the function. */
9779 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9780 if (is_ncmp && len3 < bound)
9781 bound = len3;
9782
9783 /* If the bound of the comparison is larger than the threshold,
9784 do nothing. */
9785 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
9786 return NULL_RTX;
9787
9788 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9789
9790 /* Now, start inline expansion of the call. */
9791 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
9792 (const_str_n == 1) ? bytes1 : bytes2, bound,
9793 const_str_n, mode);
9794 }
9795
9796 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9797 represents the size of the first argument to that call, or VOIDmode
9798 if the argument is a pointer. IGNORE will be true if the result
9799 isn't used. */
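/* A typical use is to harden an already bounds-checked index against
   speculative execution past the check:

     if (i < len)
       val = array[__builtin_speculation_safe_value (i)];

   where ARRAY, LEN, I and VAL are the user's variables.  On targets
   without a speculation barrier the value is simply passed through.  */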
9800 static rtx
9801 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9802 bool ignore)
9803 {
9804 rtx val, failsafe;
9805 unsigned nargs = call_expr_nargs (exp);
9806
9807 tree arg0 = CALL_EXPR_ARG (exp, 0);
9808
9809 if (mode == VOIDmode)
9810 {
9811 mode = TYPE_MODE (TREE_TYPE (arg0));
9812 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9813 }
9814
9815 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9816
9817 /* An optional second argument can be used as a failsafe value on
9818 some machines. If it isn't present, then the failsafe value is
9819 assumed to be 0. */
9820 if (nargs > 1)
9821 {
9822 tree arg1 = CALL_EXPR_ARG (exp, 1);
9823 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9824 }
9825 else
9826 failsafe = const0_rtx;
9827
9828 /* If the result isn't used, the behavior is undefined. It would be
9829 nice to emit a warning here, but path splitting means this might
9830 happen with legitimate code. So simply drop the builtin
9831 expansion in that case; we've handled any side-effects above. */
9832 if (ignore)
9833 return const0_rtx;
9834
9835 /* If we don't have a suitable target, create one to hold the result. */
9836 if (target == NULL || GET_MODE (target) != mode)
9837 target = gen_reg_rtx (mode);
9838
9839 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9840 val = convert_modes (mode, VOIDmode, val, false);
9841
9842 return targetm.speculation_safe_value (mode, target, val, failsafe);
9843 }
9844
9845 /* Expand an expression EXP that calls a built-in function,
9846 with result going to TARGET if that's convenient
9847 (and in mode MODE if that's convenient).
9848 SUBTARGET may be used as the target for computing one of EXP's operands.
9849 IGNORE is nonzero if the value is to be ignored. */
9850
9851 rtx
9852 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
9853 int ignore)
9854 {
9855 tree fndecl = get_callee_fndecl (exp);
9856 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9857 int flags;
9858
9859 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9860 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9861
9862 /* When ASan is enabled, we don't want to expand some memory/string
9863 builtins and rely on libsanitizer's hooks. This allows us to avoid
9864 redundant checks and be sure that possible overflow will be detected
9865 by ASan. */
9866
9867 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9868 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9869 return expand_call (exp, target, ignore);
9870
9871 /* When not optimizing, generate calls to library functions for a certain
9872 set of builtins. */
9873 if (!optimize
9874 && !called_as_built_in (fndecl)
9875 && fcode != BUILT_IN_FORK
9876 && fcode != BUILT_IN_EXECL
9877 && fcode != BUILT_IN_EXECV
9878 && fcode != BUILT_IN_EXECLP
9879 && fcode != BUILT_IN_EXECLE
9880 && fcode != BUILT_IN_EXECVP
9881 && fcode != BUILT_IN_EXECVE
9882 && fcode != BUILT_IN_CLEAR_CACHE
9883 && !ALLOCA_FUNCTION_CODE_P (fcode)
9884 && fcode != BUILT_IN_FREE)
9885 return expand_call (exp, target, ignore);
9886
9887 /* The built-in function expanders test for target == const0_rtx
9888 to determine whether the function's result will be ignored. */
9889 if (ignore)
9890 target = const0_rtx;
9891
9892 /* If the result of a pure or const built-in function is ignored, and
9893 none of its arguments are volatile, we can avoid expanding the
9894 built-in call and just evaluate the arguments for side-effects. */
9895 if (target == const0_rtx
9896 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9897 && !(flags & ECF_LOOPING_CONST_OR_PURE))
9898 {
9899 bool volatilep = false;
9900 tree arg;
9901 call_expr_arg_iterator iter;
9902
9903 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9904 if (TREE_THIS_VOLATILE (arg))
9905 {
9906 volatilep = true;
9907 break;
9908 }
9909
9910 if (! volatilep)
9911 {
9912 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9913 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
9914 return const0_rtx;
9915 }
9916 }
9917
9918 switch (fcode)
9919 {
9920 CASE_FLT_FN (BUILT_IN_FABS):
9921 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9922 case BUILT_IN_FABSD32:
9923 case BUILT_IN_FABSD64:
9924 case BUILT_IN_FABSD128:
9925 target = expand_builtin_fabs (exp, target, subtarget);
9926 if (target)
9927 return target;
9928 break;
9929
9930 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9931 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
9932 target = expand_builtin_copysign (exp, target, subtarget);
9933 if (target)
9934 return target;
9935 break;
9936
9937 /* Just do a normal library call if we were unable to fold
9938 the values. */
9939 CASE_FLT_FN (BUILT_IN_CABS):
9940 break;
9941
9942 CASE_FLT_FN (BUILT_IN_FMA):
9943 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9944 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9945 if (target)
9946 return target;
9947 break;
9948
9949 CASE_FLT_FN (BUILT_IN_ILOGB):
9950 if (! flag_unsafe_math_optimizations)
9951 break;
9952 gcc_fallthrough ();
9953 CASE_FLT_FN (BUILT_IN_ISINF):
9954 CASE_FLT_FN (BUILT_IN_FINITE):
9955 case BUILT_IN_ISFINITE:
9956 case BUILT_IN_ISNORMAL:
9957 target = expand_builtin_interclass_mathfn (exp, target);
9958 if (target)
9959 return target;
9960 break;
9961
9962 CASE_FLT_FN (BUILT_IN_ICEIL):
9963 CASE_FLT_FN (BUILT_IN_LCEIL):
9964 CASE_FLT_FN (BUILT_IN_LLCEIL):
9965 CASE_FLT_FN (BUILT_IN_LFLOOR):
9966 CASE_FLT_FN (BUILT_IN_IFLOOR):
9967 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9968 target = expand_builtin_int_roundingfn (exp, target);
9969 if (target)
9970 return target;
9971 break;
9972
9973 CASE_FLT_FN (BUILT_IN_IRINT):
9974 CASE_FLT_FN (BUILT_IN_LRINT):
9975 CASE_FLT_FN (BUILT_IN_LLRINT):
9976 CASE_FLT_FN (BUILT_IN_IROUND):
9977 CASE_FLT_FN (BUILT_IN_LROUND):
9978 CASE_FLT_FN (BUILT_IN_LLROUND):
9979 target = expand_builtin_int_roundingfn_2 (exp, target);
9980 if (target)
9981 return target;
9982 break;
9983
9984 CASE_FLT_FN (BUILT_IN_POWI):
9985 target = expand_builtin_powi (exp, target);
9986 if (target)
9987 return target;
9988 break;
9989
9990 CASE_FLT_FN (BUILT_IN_CEXPI):
9991 target = expand_builtin_cexpi (exp, target);
9992 gcc_assert (target);
9993 return target;
9994
9995 CASE_FLT_FN (BUILT_IN_SIN):
9996 CASE_FLT_FN (BUILT_IN_COS):
9997 if (! flag_unsafe_math_optimizations)
9998 break;
9999 target = expand_builtin_mathfn_3 (exp, target, subtarget);
10000 if (target)
10001 return target;
10002 break;
10003
10004 CASE_FLT_FN (BUILT_IN_SINCOS):
10005 if (! flag_unsafe_math_optimizations)
10006 break;
10007 target = expand_builtin_sincos (exp);
10008 if (target)
10009 return target;
10010 break;
10011
10012 case BUILT_IN_APPLY_ARGS:
10013 return expand_builtin_apply_args ();
10014
10015 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
10016 FUNCTION with a copy of the parameters described by
10017 ARGUMENTS, and ARGSIZE. It returns a block of memory
10018 allocated on the stack into which is stored all the registers
10019 that might possibly be used for returning the result of a
10020 function. ARGUMENTS is the value returned by
10021 __builtin_apply_args. ARGSIZE is the number of bytes of
10022 arguments that must be copied. ??? How should this value be
10023 computed? We'll also need a safe worst case value for varargs
10024 functions. */
10025 case BUILT_IN_APPLY:
10026 if (!validate_arglist (exp, POINTER_TYPE,
10027 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
10028 && !validate_arglist (exp, REFERENCE_TYPE,
10029 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10030 return const0_rtx;
10031 else
10032 {
10033 rtx ops[3];
10034
10035 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
10036 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
10037 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
10038
10039 return expand_builtin_apply (ops[0], ops[1], ops[2]);
10040 }
10041
10042 /* __builtin_return (RESULT) causes the function to return the
10043 value described by RESULT. RESULT is address of the block of
10044 memory returned by __builtin_apply. */
10045 case BUILT_IN_RETURN:
10046 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10047 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
10048 return const0_rtx;
10049
10050 case BUILT_IN_SAVEREGS:
10051 return expand_builtin_saveregs ();
10052
10053 case BUILT_IN_VA_ARG_PACK:
10054 /* All valid uses of __builtin_va_arg_pack () are removed during
10055 inlining. */
10056 error ("invalid use of %<__builtin_va_arg_pack ()%>");
10057 return const0_rtx;
10058
10059 case BUILT_IN_VA_ARG_PACK_LEN:
10060 /* All valid uses of __builtin_va_arg_pack_len () are removed during
10061 inlining. */
10062 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
10063 return const0_rtx;
10064
10065 /* Return the address of the first anonymous stack arg. */
10066 case BUILT_IN_NEXT_ARG:
10067 if (fold_builtin_next_arg (exp, false))
10068 return const0_rtx;
10069 return expand_builtin_next_arg ();
10070
10071 case BUILT_IN_CLEAR_CACHE:
10072 expand_builtin___clear_cache (exp);
10073 return const0_rtx;
10074
10075 case BUILT_IN_CLASSIFY_TYPE:
10076 return expand_builtin_classify_type (exp);
10077
10078 case BUILT_IN_CONSTANT_P:
10079 return const0_rtx;
10080
10081 case BUILT_IN_FRAME_ADDRESS:
10082 case BUILT_IN_RETURN_ADDRESS:
10083 return expand_builtin_frame_address (fndecl, exp);
10084
10085 /* Returns the address of the area where the structure is returned.
10086 0 otherwise. */
10087 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10088 if (call_expr_nargs (exp) != 0
10089 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
10090 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
10091 return const0_rtx;
10092 else
10093 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
10094
10095 CASE_BUILT_IN_ALLOCA:
10096 target = expand_builtin_alloca (exp);
10097 if (target)
10098 return target;
10099 break;
10100
10101 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
10102 return expand_asan_emit_allocas_unpoison (exp);
10103
10104 case BUILT_IN_STACK_SAVE:
10105 return expand_stack_save ();
10106
10107 case BUILT_IN_STACK_RESTORE:
10108 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
10109 return const0_rtx;
10110
10111 case BUILT_IN_BSWAP16:
10112 case BUILT_IN_BSWAP32:
10113 case BUILT_IN_BSWAP64:
10114 case BUILT_IN_BSWAP128:
10115 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
10116 if (target)
10117 return target;
10118 break;
10119
10120 CASE_INT_FN (BUILT_IN_FFS):
10121 target = expand_builtin_unop (target_mode, exp, target,
10122 subtarget, ffs_optab);
10123 if (target)
10124 return target;
10125 break;
10126
10127 CASE_INT_FN (BUILT_IN_CLZ):
10128 target = expand_builtin_unop (target_mode, exp, target,
10129 subtarget, clz_optab);
10130 if (target)
10131 return target;
10132 break;
10133
10134 CASE_INT_FN (BUILT_IN_CTZ):
10135 target = expand_builtin_unop (target_mode, exp, target,
10136 subtarget, ctz_optab);
10137 if (target)
10138 return target;
10139 break;
10140
10141 CASE_INT_FN (BUILT_IN_CLRSB):
10142 target = expand_builtin_unop (target_mode, exp, target,
10143 subtarget, clrsb_optab);
10144 if (target)
10145 return target;
10146 break;
10147
10148 CASE_INT_FN (BUILT_IN_POPCOUNT):
10149 target = expand_builtin_unop (target_mode, exp, target,
10150 subtarget, popcount_optab);
10151 if (target)
10152 return target;
10153 break;
10154
10155 CASE_INT_FN (BUILT_IN_PARITY):
10156 target = expand_builtin_unop (target_mode, exp, target,
10157 subtarget, parity_optab);
10158 if (target)
10159 return target;
10160 break;
10161
10162 case BUILT_IN_STRLEN:
10163 target = expand_builtin_strlen (exp, target, target_mode);
10164 if (target)
10165 return target;
10166 break;
10167
10168 case BUILT_IN_STRNLEN:
10169 target = expand_builtin_strnlen (exp, target, target_mode);
10170 if (target)
10171 return target;
10172 break;
10173
10174 case BUILT_IN_STRCAT:
10175 target = expand_builtin_strcat (exp);
10176 if (target)
10177 return target;
10178 break;
10179
10180 case BUILT_IN_GETTEXT:
10181 case BUILT_IN_PUTS:
10182 case BUILT_IN_PUTS_UNLOCKED:
10183 case BUILT_IN_STRDUP:
10184 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10185 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10186 break;
10187
10188 case BUILT_IN_INDEX:
10189 case BUILT_IN_RINDEX:
10190 case BUILT_IN_STRCHR:
10191 case BUILT_IN_STRRCHR:
10192 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10193 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10194 break;
10195
10196 case BUILT_IN_FPUTS:
10197 case BUILT_IN_FPUTS_UNLOCKED:
10198 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10199 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10200 break;
10201
10202 case BUILT_IN_STRNDUP:
10203 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10204 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
10205 break;
10206
10207 case BUILT_IN_STRCASECMP:
10208 case BUILT_IN_STRPBRK:
10209 case BUILT_IN_STRSPN:
10210 case BUILT_IN_STRCSPN:
10211 case BUILT_IN_STRSTR:
10212 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10213 {
10214 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10215 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
10216 }
10217 break;
10218
10219 case BUILT_IN_STRCPY:
10220 target = expand_builtin_strcpy (exp, target);
10221 if (target)
10222 return target;
10223 break;
10224
10225 case BUILT_IN_STRNCAT:
10226 target = expand_builtin_strncat (exp, target);
10227 if (target)
10228 return target;
10229 break;
10230
10231 case BUILT_IN_STRNCPY:
10232 target = expand_builtin_strncpy (exp, target);
10233 if (target)
10234 return target;
10235 break;
10236
10237 case BUILT_IN_STPCPY:
10238 target = expand_builtin_stpcpy (exp, target, mode);
10239 if (target)
10240 return target;
10241 break;
10242
10243 case BUILT_IN_STPNCPY:
10244 target = expand_builtin_stpncpy (exp, target);
10245 if (target)
10246 return target;
10247 break;
10248
10249 case BUILT_IN_MEMCHR:
10250 target = expand_builtin_memchr (exp, target);
10251 if (target)
10252 return target;
10253 break;
10254
10255 case BUILT_IN_MEMCPY:
10256 target = expand_builtin_memcpy (exp, target);
10257 if (target)
10258 return target;
10259 break;
10260
10261 case BUILT_IN_MEMMOVE:
10262 target = expand_builtin_memmove (exp, target);
10263 if (target)
10264 return target;
10265 break;
10266
10267 case BUILT_IN_MEMPCPY:
10268 target = expand_builtin_mempcpy (exp, target);
10269 if (target)
10270 return target;
10271 break;
10272
10273 case BUILT_IN_MEMSET:
10274 target = expand_builtin_memset (exp, target, mode);
10275 if (target)
10276 return target;
10277 break;
10278
10279 case BUILT_IN_BZERO:
10280 target = expand_builtin_bzero (exp);
10281 if (target)
10282 return target;
10283 break;
10284
10285 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
10286 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
10287 when changing it to a strcmp call. */
10288 case BUILT_IN_STRCMP_EQ:
10289 target = expand_builtin_memcmp (exp, target, true);
10290 if (target)
10291 return target;
10292
10293 /* Change this call back to a BUILT_IN_STRCMP. */
10294 TREE_OPERAND (exp, 1)
10295 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
10296
10297 /* Delete the last parameter. */
10298 unsigned int i;
10299 vec<tree, va_gc> *arg_vec;
10300 vec_alloc (arg_vec, 2);
10301 for (i = 0; i < 2; i++)
10302 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
10303 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
10304 /* FALLTHROUGH */
10305
10306 case BUILT_IN_STRCMP:
10307 target = expand_builtin_strcmp (exp, target);
10308 if (target)
10309 return target;
10310 break;
10311
10312 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
10313 back to a BUILT_IN_STRNCMP. */
10314 case BUILT_IN_STRNCMP_EQ:
10315 target = expand_builtin_memcmp (exp, target, true);
10316 if (target)
10317 return target;
10318
10319 /* Change it back to a BUILT_IN_STRNCMP. */
10320 TREE_OPERAND (exp, 1)
10321 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
10322 /* FALLTHROUGH */
10323
10324 case BUILT_IN_STRNCMP:
10325 target = expand_builtin_strncmp (exp, target, mode);
10326 if (target)
10327 return target;
10328 break;
10329
10330 case BUILT_IN_BCMP:
10331 case BUILT_IN_MEMCMP:
10332 case BUILT_IN_MEMCMP_EQ:
10333 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
10334 if (target)
10335 return target;
10336 if (fcode == BUILT_IN_MEMCMP_EQ)
10337 {
10338 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
10339 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
10340 }
10341 break;
10342
10343 case BUILT_IN_SETJMP:
10344 /* This should have been lowered to the builtins below. */
10345 gcc_unreachable ();
10346
10347 case BUILT_IN_SETJMP_SETUP:
10348 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10349 and the receiver label. */
10350 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10351 {
10352 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10353 VOIDmode, EXPAND_NORMAL);
10354 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
10355 rtx_insn *label_r = label_rtx (label);
10356
10357 /* This is copied from the handling of non-local gotos. */
10358 expand_builtin_setjmp_setup (buf_addr, label_r);
10359 nonlocal_goto_handler_labels
10360 = gen_rtx_INSN_LIST (VOIDmode, label_r,
10361 nonlocal_goto_handler_labels);
10362 /* ??? Do not let expand_label treat us as such since we would
10363 not want to be both on the list of non-local labels and on
10364 the list of forced labels. */
10365 FORCED_LABEL (label) = 0;
10366 return const0_rtx;
10367 }
10368 break;
10369
10370 case BUILT_IN_SETJMP_RECEIVER:
10371 /* __builtin_setjmp_receiver is passed the receiver label. */
10372 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10373 {
10374 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
10375 rtx_insn *label_r = label_rtx (label);
10376
10377 expand_builtin_setjmp_receiver (label_r);
10378 return const0_rtx;
10379 }
10380 break;
10381
10382 /* __builtin_longjmp is passed a pointer to an array of five words.
10383 It's similar to the C library longjmp function but works with
10384 __builtin_setjmp above. */
10385 case BUILT_IN_LONGJMP:
10386 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10387 {
10388 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10389 VOIDmode, EXPAND_NORMAL);
10390 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
10391
10392 if (value != const1_rtx)
10393 {
10394 error ("%<__builtin_longjmp%> second argument must be 1");
10395 return const0_rtx;
10396 }
10397
10398 expand_builtin_longjmp (buf_addr, value);
10399 return const0_rtx;
10400 }
10401 break;
10402
10403 case BUILT_IN_NONLOCAL_GOTO:
10404 target = expand_builtin_nonlocal_goto (exp);
10405 if (target)
10406 return target;
10407 break;
10408
10409 /* This updates the setjmp buffer that is its argument with the value
10410 of the current stack pointer. */
10411 case BUILT_IN_UPDATE_SETJMP_BUF:
10412 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10413 {
10414 rtx buf_addr
10415 = expand_normal (CALL_EXPR_ARG (exp, 0));
10416
10417 expand_builtin_update_setjmp_buf (buf_addr);
10418 return const0_rtx;
10419 }
10420 break;
10421
10422 case BUILT_IN_TRAP:
10423 expand_builtin_trap ();
10424 return const0_rtx;
10425
10426 case BUILT_IN_UNREACHABLE:
10427 expand_builtin_unreachable ();
10428 return const0_rtx;
10429
10430 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10431 case BUILT_IN_SIGNBITD32:
10432 case BUILT_IN_SIGNBITD64:
10433 case BUILT_IN_SIGNBITD128:
10434 target = expand_builtin_signbit (exp, target);
10435 if (target)
10436 return target;
10437 break;
10438
10439 /* Various hooks for the DWARF 2 __throw routine. */
10440 case BUILT_IN_UNWIND_INIT:
10441 expand_builtin_unwind_init ();
10442 return const0_rtx;
10443 case BUILT_IN_DWARF_CFA:
10444 return virtual_cfa_rtx;
10445 #ifdef DWARF2_UNWIND_INFO
10446 case BUILT_IN_DWARF_SP_COLUMN:
10447 return expand_builtin_dwarf_sp_column ();
10448 case BUILT_IN_INIT_DWARF_REG_SIZES:
10449 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
10450 return const0_rtx;
10451 #endif
10452 case BUILT_IN_FROB_RETURN_ADDR:
10453 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
10454 case BUILT_IN_EXTRACT_RETURN_ADDR:
10455 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
10456 case BUILT_IN_EH_RETURN:
10457 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10458 CALL_EXPR_ARG (exp, 1));
10459 return const0_rtx;
10460 case BUILT_IN_EH_RETURN_DATA_REGNO:
10461 return expand_builtin_eh_return_data_regno (exp);
10462 case BUILT_IN_EXTEND_POINTER:
10463 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
10464 case BUILT_IN_EH_POINTER:
10465 return expand_builtin_eh_pointer (exp);
10466 case BUILT_IN_EH_FILTER:
10467 return expand_builtin_eh_filter (exp);
10468 case BUILT_IN_EH_COPY_VALUES:
10469 return expand_builtin_eh_copy_values (exp);
10470
10471 case BUILT_IN_VA_START:
10472 return expand_builtin_va_start (exp);
10473 case BUILT_IN_VA_END:
10474 return expand_builtin_va_end (exp);
10475 case BUILT_IN_VA_COPY:
10476 return expand_builtin_va_copy (exp);
10477 case BUILT_IN_EXPECT:
10478 return expand_builtin_expect (exp, target);
10479 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10480 return expand_builtin_expect_with_probability (exp, target);
10481 case BUILT_IN_ASSUME_ALIGNED:
10482 return expand_builtin_assume_aligned (exp, target);
10483 case BUILT_IN_PREFETCH:
10484 expand_builtin_prefetch (exp);
10485 return const0_rtx;
10486
10487 case BUILT_IN_INIT_TRAMPOLINE:
10488 return expand_builtin_init_trampoline (exp, true);
10489 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10490 return expand_builtin_init_trampoline (exp, false);
10491 case BUILT_IN_ADJUST_TRAMPOLINE:
10492 return expand_builtin_adjust_trampoline (exp);
10493
10494 case BUILT_IN_INIT_DESCRIPTOR:
10495 return expand_builtin_init_descriptor (exp);
10496 case BUILT_IN_ADJUST_DESCRIPTOR:
10497 return expand_builtin_adjust_descriptor (exp);
10498
10499 case BUILT_IN_FORK:
10500 case BUILT_IN_EXECL:
10501 case BUILT_IN_EXECV:
10502 case BUILT_IN_EXECLP:
10503 case BUILT_IN_EXECLE:
10504 case BUILT_IN_EXECVP:
10505 case BUILT_IN_EXECVE:
10506 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
10507 if (target)
10508 return target;
10509 break;
10510
10511 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10512 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10513 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10514 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10515 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
10517 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
10518 if (target)
10519 return target;
10520 break;
10521
10522 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10523 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10524 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10525 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10526 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
10528 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
10529 if (target)
10530 return target;
10531 break;
10532
10533 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10534 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10535 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10536 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10537 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10538 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
10539 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
10540 if (target)
10541 return target;
10542 break;
10543
10544 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10545 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10546 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10547 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10548 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10549 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
10550 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
10551 if (target)
10552 return target;
10553 break;
10554
10555 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10556 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10557 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10558 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10559 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10560 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
10561 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
10562 if (target)
10563 return target;
10564 break;
10565
10566 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10567 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10568 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10569 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10570 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
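/* NAND has no tree code of its own; NOT is used here as a marker and
expand_builtin_sync_operation treats it as the NAND operation,
i.e. computing ~(*ptr & value). */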
10571 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
10572 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
10573 if (target)
10574 return target;
10575 break;
10576
10577 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10578 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10579 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10580 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10581 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10582 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
10583 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
10584 if (target)
10585 return target;
10586 break;
10587
10588 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10589 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10590 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10591 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10592 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10593 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
10594 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
10595 if (target)
10596 return target;
10597 break;
10598
10599 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10600 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10601 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10602 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10603 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10604 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
10605 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
10606 if (target)
10607 return target;
10608 break;
10609
10610 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10611 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10612 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10613 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10614 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10615 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
10616 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
10617 if (target)
10618 return target;
10619 break;
10620
10621 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10622 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10623 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10624 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10625 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10626 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
10627 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
10628 if (target)
10629 return target;
10630 break;
10631
10632 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10633 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10634 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10635 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10636 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10637 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
10638 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
10639 if (target)
10640 return target;
10641 break;
10642
10643 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10644 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10645 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10646 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10647 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
10648 if (mode == VOIDmode)
10649 mode = TYPE_MODE (boolean_type_node);
10650 if (!target || !register_operand (target, mode))
10651 target = gen_reg_rtx (mode);
10652
10653 mode = get_builtin_sync_mode
10654 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
10655 target = expand_builtin_compare_and_swap (mode, exp, true, target);
10656 if (target)
10657 return target;
10658 break;
10659
10660 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10661 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10662 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10663 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10664 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10665 mode = get_builtin_sync_mode
10666 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
10667 target = expand_builtin_compare_and_swap (mode, exp, false, target);
10668 if (target)
10669 return target;
10670 break;
10671
10672 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10673 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10674 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10675 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10676 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10677 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10678 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
10679 if (target)
10680 return target;
10681 break;
10682
10683 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10684 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10685 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10686 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10687 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10688 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10689 expand_builtin_sync_lock_release (mode, exp);
10690 return const0_rtx;
10691
10692 case BUILT_IN_SYNC_SYNCHRONIZE:
10693 expand_builtin_sync_synchronize ();
10694 return const0_rtx;
10695
10696 case BUILT_IN_ATOMIC_EXCHANGE_1:
10697 case BUILT_IN_ATOMIC_EXCHANGE_2:
10698 case BUILT_IN_ATOMIC_EXCHANGE_4:
10699 case BUILT_IN_ATOMIC_EXCHANGE_8:
10700 case BUILT_IN_ATOMIC_EXCHANGE_16:
10701 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10702 target = expand_builtin_atomic_exchange (mode, exp, target);
10703 if (target)
10704 return target;
10705 break;
10706
10707 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10708 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10709 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10710 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10711 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
10712 {
10713 unsigned int nargs, z;
10714 vec<tree, va_gc> *vec;
10715
10716 mode =
10717 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10718 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10719 if (target)
10720 return target;
10721
10722 /* If this is turned into an external library call, the weak parameter
10723 must be dropped to match the expected parameter list. */
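/* The builtin's arguments are (ptr, expected, desired, weak,
success_order, failure_order); arguments 0-2 and 4-5 are kept below
and argument 3, the weak flag, is skipped. */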
10724 nargs = call_expr_nargs (exp);
10725 vec_alloc (vec, nargs - 1);
10726 for (z = 0; z < 3; z++)
10727 vec->quick_push (CALL_EXPR_ARG (exp, z));
10728 /* Skip the boolean weak parameter. */
10729 for (z = 4; z < 6; z++)
10730 vec->quick_push (CALL_EXPR_ARG (exp, z));
10731 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10732 break;
10733 }
10734
10735 case BUILT_IN_ATOMIC_LOAD_1:
10736 case BUILT_IN_ATOMIC_LOAD_2:
10737 case BUILT_IN_ATOMIC_LOAD_4:
10738 case BUILT_IN_ATOMIC_LOAD_8:
10739 case BUILT_IN_ATOMIC_LOAD_16:
10740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10741 target = expand_builtin_atomic_load (mode, exp, target);
10742 if (target)
10743 return target;
10744 break;
10745
10746 case BUILT_IN_ATOMIC_STORE_1:
10747 case BUILT_IN_ATOMIC_STORE_2:
10748 case BUILT_IN_ATOMIC_STORE_4:
10749 case BUILT_IN_ATOMIC_STORE_8:
10750 case BUILT_IN_ATOMIC_STORE_16:
10751 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10752 target = expand_builtin_atomic_store (mode, exp);
10753 if (target)
10754 return const0_rtx;
10755 break;
10756
10757 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10758 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10759 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10760 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10761 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10762 {
10763 enum built_in_function lib;
10764 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10765 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10766 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10767 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10768 ignore, lib);
10769 if (target)
10770 return target;
10771 break;
10772 }
10773 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10774 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10775 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10776 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10777 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10778 {
10779 enum built_in_function lib;
10780 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10781 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10782 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10783 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10784 ignore, lib);
10785 if (target)
10786 return target;
10787 break;
10788 }
10789 case BUILT_IN_ATOMIC_AND_FETCH_1:
10790 case BUILT_IN_ATOMIC_AND_FETCH_2:
10791 case BUILT_IN_ATOMIC_AND_FETCH_4:
10792 case BUILT_IN_ATOMIC_AND_FETCH_8:
10793 case BUILT_IN_ATOMIC_AND_FETCH_16:
10794 {
10795 enum built_in_function lib;
10796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10797 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10798 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10799 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10800 ignore, lib);
10801 if (target)
10802 return target;
10803 break;
10804 }
10805 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10806 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10807 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10808 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10809 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10810 {
10811 enum built_in_function lib;
10812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10813 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10814 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10815 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10816 ignore, lib);
10817 if (target)
10818 return target;
10819 break;
10820 }
10821 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10822 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10823 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10824 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10825 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10826 {
10827 enum built_in_function lib;
10828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10829 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10830 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10831 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10832 ignore, lib);
10833 if (target)
10834 return target;
10835 break;
10836 }
10837 case BUILT_IN_ATOMIC_OR_FETCH_1:
10838 case BUILT_IN_ATOMIC_OR_FETCH_2:
10839 case BUILT_IN_ATOMIC_OR_FETCH_4:
10840 case BUILT_IN_ATOMIC_OR_FETCH_8:
10841 case BUILT_IN_ATOMIC_OR_FETCH_16:
10842 {
10843 enum built_in_function lib;
10844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10845 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10846 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10847 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10848 ignore, lib);
10849 if (target)
10850 return target;
10851 break;
10852 }
10853 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10854 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10855 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10856 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10857 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10858 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10859 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10860 ignore, BUILT_IN_NONE);
10861 if (target)
10862 return target;
10863 break;
10864
10865 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10866 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10867 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10868 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10869 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10870 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10871 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10872 ignore, BUILT_IN_NONE);
10873 if (target)
10874 return target;
10875 break;
10876
10877 case BUILT_IN_ATOMIC_FETCH_AND_1:
10878 case BUILT_IN_ATOMIC_FETCH_AND_2:
10879 case BUILT_IN_ATOMIC_FETCH_AND_4:
10880 case BUILT_IN_ATOMIC_FETCH_AND_8:
10881 case BUILT_IN_ATOMIC_FETCH_AND_16:
10882 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10883 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10884 ignore, BUILT_IN_NONE);
10885 if (target)
10886 return target;
10887 break;
10888
10889 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10890 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10891 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10892 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10893 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10894 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10895 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10896 ignore, BUILT_IN_NONE);
10897 if (target)
10898 return target;
10899 break;
10900
10901 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10902 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10903 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10904 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10905 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10906 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10907 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10908 ignore, BUILT_IN_NONE);
10909 if (target)
10910 return target;
10911 break;
10912
10913 case BUILT_IN_ATOMIC_FETCH_OR_1:
10914 case BUILT_IN_ATOMIC_FETCH_OR_2:
10915 case BUILT_IN_ATOMIC_FETCH_OR_4:
10916 case BUILT_IN_ATOMIC_FETCH_OR_8:
10917 case BUILT_IN_ATOMIC_FETCH_OR_16:
10918 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10919 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10920 ignore, BUILT_IN_NONE);
10921 if (target)
10922 return target;
10923 break;
10924
10925 case BUILT_IN_ATOMIC_TEST_AND_SET:
10926 return expand_builtin_atomic_test_and_set (exp, target);
10927
10928 case BUILT_IN_ATOMIC_CLEAR:
10929 return expand_builtin_atomic_clear (exp);
10930
10931 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10932 return expand_builtin_atomic_always_lock_free (exp);
10933
10934 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10935 target = expand_builtin_atomic_is_lock_free (exp);
10936 if (target)
10937 return target;
10938 break;
10939
10940 case BUILT_IN_ATOMIC_THREAD_FENCE:
10941 expand_builtin_atomic_thread_fence (exp);
10942 return const0_rtx;
10943
10944 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10945 expand_builtin_atomic_signal_fence (exp);
10946 return const0_rtx;
10947
10948 case BUILT_IN_OBJECT_SIZE:
10949 return expand_builtin_object_size (exp);
10950
10951 case BUILT_IN_MEMCPY_CHK:
10952 case BUILT_IN_MEMPCPY_CHK:
10953 case BUILT_IN_MEMMOVE_CHK:
10954 case BUILT_IN_MEMSET_CHK:
10955 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10956 if (target)
10957 return target;
10958 break;
10959
10960 case BUILT_IN_STRCPY_CHK:
10961 case BUILT_IN_STPCPY_CHK:
10962 case BUILT_IN_STRNCPY_CHK:
10963 case BUILT_IN_STPNCPY_CHK:
10964 case BUILT_IN_STRCAT_CHK:
10965 case BUILT_IN_STRNCAT_CHK:
10966 case BUILT_IN_SNPRINTF_CHK:
10967 case BUILT_IN_VSNPRINTF_CHK:
10968 maybe_emit_chk_warning (exp, fcode);
10969 break;
10970
10971 case BUILT_IN_SPRINTF_CHK:
10972 case BUILT_IN_VSPRINTF_CHK:
10973 maybe_emit_sprintf_chk_warning (exp, fcode);
10974 break;
10975
10976 case BUILT_IN_THREAD_POINTER:
10977 return expand_builtin_thread_pointer (exp, target);
10978
10979 case BUILT_IN_SET_THREAD_POINTER:
10980 expand_builtin_set_thread_pointer (exp);
10981 return const0_rtx;
10982
10983 case BUILT_IN_ACC_ON_DEVICE:
10984 /* Do a library call if we failed to expand the builtin when
10985 folding. */
10986 break;
10987
10988 case BUILT_IN_GOACC_PARLEVEL_ID:
10989 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10990 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10991
10992 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10993 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10994
10995 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10996 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10997 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10998 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10999 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
11000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
11001 return expand_speculation_safe_value (mode, exp, target, ignore);
11002
11003 default: /* just do a library call for an unknown builtin */
11004 break;
11005 }
11006
11007 /* The switch statement above can drop through to cause the function
11008 to be called normally. */
11009 return expand_call (exp, target, ignore);
11010 }
11011
11012 /* Determine whether a tree node represents a call to a built-in
11013 function. If the tree T is a call to a built-in function with
11014 the right number of arguments of the appropriate types, return
11015 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
11016 Otherwise the return value is END_BUILTINS. */
11017
11018 enum built_in_function
11019 builtin_mathfn_code (const_tree t)
11020 {
11021 const_tree fndecl, arg, parmlist;
11022 const_tree argtype, parmtype;
11023 const_call_expr_arg_iterator iter;
11024
11025 if (TREE_CODE (t) != CALL_EXPR)
11026 return END_BUILTINS;
11027
11028 fndecl = get_callee_fndecl (t);
11029 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
11030 return END_BUILTINS;
11031
11032 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
11033 init_const_call_expr_arg_iterator (t, &iter);
11034 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
11035 {
11036 /* If a function doesn't take a variable number of arguments,
11037 the last element in the list will have type `void'. */
11038 parmtype = TREE_VALUE (parmlist);
11039 if (VOID_TYPE_P (parmtype))
11040 {
11041 if (more_const_call_expr_args_p (&iter))
11042 return END_BUILTINS;
11043 return DECL_FUNCTION_CODE (fndecl);
11044 }
11045
11046 if (! more_const_call_expr_args_p (&iter))
11047 return END_BUILTINS;
11048
11049 arg = next_const_call_expr_arg (&iter);
11050 argtype = TREE_TYPE (arg);
11051
11052 if (SCALAR_FLOAT_TYPE_P (parmtype))
11053 {
11054 if (! SCALAR_FLOAT_TYPE_P (argtype))
11055 return END_BUILTINS;
11056 }
11057 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
11058 {
11059 if (! COMPLEX_FLOAT_TYPE_P (argtype))
11060 return END_BUILTINS;
11061 }
11062 else if (POINTER_TYPE_P (parmtype))
11063 {
11064 if (! POINTER_TYPE_P (argtype))
11065 return END_BUILTINS;
11066 }
11067 else if (INTEGRAL_TYPE_P (parmtype))
11068 {
11069 if (! INTEGRAL_TYPE_P (argtype))
11070 return END_BUILTINS;
11071 }
11072 else
11073 return END_BUILTINS;
11074 }
11075
11076 /* Variable-length argument list. */
11077 return DECL_FUNCTION_CODE (fndecl);
11078 }
11079
11080 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
11081 evaluate to a constant. */
11082
11083 static tree
11084 fold_builtin_constant_p (tree arg)
11085 {
11086 /* We return 1 for a numeric type that's known to be a constant
11087 value at compile time or for an aggregate type that's a
11088 literal constant. */
11089 STRIP_NOPS (arg);
11090
11091 /* If we know this is a constant, return the constant one. */
11092 if (CONSTANT_CLASS_P (arg)
11093 || (TREE_CODE (arg) == CONSTRUCTOR
11094 && TREE_CONSTANT (arg)))
11095 return integer_one_node;
11096 if (TREE_CODE (arg) == ADDR_EXPR)
11097 {
11098 tree op = TREE_OPERAND (arg, 0);
11099 if (TREE_CODE (op) == STRING_CST
11100 || (TREE_CODE (op) == ARRAY_REF
11101 && integer_zerop (TREE_OPERAND (op, 1))
11102 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
11103 return integer_one_node;
11104 }
11105
11106 /* If this expression has side effects, show we don't know it to be a
11107 constant. Likewise if it's a pointer or aggregate type since in
11108 those cases we only want literals, as those are only optimized
11109 when generating RTL, not later.
11110 And finally, if we are compiling an initializer, not code, we
11111 need to return a definite result now; there's not going to be any
11112 more optimization done. */
11113 if (TREE_SIDE_EFFECTS (arg)
11114 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
11115 || POINTER_TYPE_P (TREE_TYPE (arg))
11116 || cfun == 0
11117 || folding_initializer
11118 || force_folding_builtin_constant_p)
11119 return integer_zero_node;
11120
11121 return NULL_TREE;
11122 }
11123
11124 /* Create builtin_expect or builtin_expect_with_probability
11125 with PRED and EXPECTED as its arguments and return it as a truthvalue.
11126 The Fortran FE can also produce builtin_expect with PREDICTOR as the third
11127 argument; builtin_expect_with_probability instead uses the third argument as
11128 the PROBABILITY value. */
11129
11130 static tree
11131 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
11132 tree predictor, tree probability)
11133 {
11134 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
11135
11136 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
11137 : BUILT_IN_EXPECT_WITH_PROBABILITY);
11138 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
11139 ret_type = TREE_TYPE (TREE_TYPE (fn));
11140 pred_type = TREE_VALUE (arg_types);
11141 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
11142
11143 pred = fold_convert_loc (loc, pred_type, pred);
11144 expected = fold_convert_loc (loc, expected_type, expected);
11145
11146 if (probability)
11147 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
11148 else
11149 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
11150 predictor);
11151
11152 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
11153 build_int_cst (ret_type, 0));
11154 }
11155
11156 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
11157 NULL_TREE if no simplification is possible. */
11158
11159 tree
11160 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
11161 tree arg3)
11162 {
11163 tree inner, fndecl, inner_arg0;
11164 enum tree_code code;
11165
11166 /* Distribute the expected value over short-circuiting operators.
11167 See through the cast from truthvalue_type_node to long. */
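/* For example, __builtin_expect (a && b, 1) is rewritten as
(__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
so the hint reaches each operand of the short-circuit. */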
11168 inner_arg0 = arg0;
11169 while (CONVERT_EXPR_P (inner_arg0)
11170 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
11171 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
11172 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
11173
11174 /* If this is a builtin_expect within a builtin_expect, keep the
11175 inner one. See through a comparison against a constant. It
11176 might have been added to create a truthvalue. */
11177 inner = inner_arg0;
11178
11179 if (COMPARISON_CLASS_P (inner)
11180 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
11181 inner = TREE_OPERAND (inner, 0);
11182
11183 if (TREE_CODE (inner) == CALL_EXPR
11184 && (fndecl = get_callee_fndecl (inner))
11185 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
11186 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
11187 return arg0;
11188
11189 inner = inner_arg0;
11190 code = TREE_CODE (inner);
11191 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
11192 {
11193 tree op0 = TREE_OPERAND (inner, 0);
11194 tree op1 = TREE_OPERAND (inner, 1);
11195 arg1 = save_expr (arg1);
11196
11197 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
11198 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
11199 inner = build2 (code, TREE_TYPE (inner), op0, op1);
11200
11201 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
11202 }
11203
11204 /* If the argument isn't invariant then there's nothing else we can do. */
11205 if (!TREE_CONSTANT (inner_arg0))
11206 return NULL_TREE;
11207
11208 /* If we expect that a comparison against the argument will fold to
11209 a constant return the constant. In practice, this means a true
11210 constant or the address of a non-weak symbol. */
11211 inner = inner_arg0;
11212 STRIP_NOPS (inner);
11213 if (TREE_CODE (inner) == ADDR_EXPR)
11214 {
11215 do
11216 {
11217 inner = TREE_OPERAND (inner, 0);
11218 }
11219 while (TREE_CODE (inner) == COMPONENT_REF
11220 || TREE_CODE (inner) == ARRAY_REF);
11221 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
11222 return NULL_TREE;
11223 }
11224
11225 /* Otherwise, ARG0 already has the proper type for the return value. */
11226 return arg0;
11227 }
11228
11229 /* Fold a call to __builtin_classify_type with argument ARG. */
11230
11231 static tree
11232 fold_builtin_classify_type (tree arg)
11233 {
11234 if (arg == 0)
11235 return build_int_cst (integer_type_node, no_type_class);
11236
11237 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
11238 }
11239
11240 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
11241 ARG. */
11242
11243 static tree
11244 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
11245 {
11246 if (!validate_arg (arg, POINTER_TYPE))
11247 return NULL_TREE;
11248 else
11249 {
11250 c_strlen_data lendata = { };
11251 tree len = c_strlen (arg, 0, &lendata);
11252
11253 if (len)
11254 return fold_convert_loc (loc, type, len);
11255
11256 if (!lendata.decl)
11257 c_strlen (arg, 1, &lendata);
11258
11259 if (lendata.decl)
11260 {
11261 if (EXPR_HAS_LOCATION (arg))
11262 loc = EXPR_LOCATION (arg);
11263 else if (loc == UNKNOWN_LOCATION)
11264 loc = input_location;
11265 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
11266 }
11267
11268 return NULL_TREE;
11269 }
11270 }
11271
11272 /* Fold a call to __builtin_inf or __builtin_huge_val. */
11273
11274 static tree
11275 fold_builtin_inf (location_t loc, tree type, int warn)
11276 {
11277 REAL_VALUE_TYPE real;
11278
11279 /* __builtin_inff is intended to be usable to define INFINITY on all
11280 targets. If an infinity is not available, INFINITY expands "to a
11281 positive constant of type float that overflows at translation
11282 time", footnote "In this case, using INFINITY will violate the
11283 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
11284 Thus we pedwarn to ensure this constraint violation is
11285 diagnosed. */
11286 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
11287 pedwarn (loc, 0, "target format does not support infinity");
11288
11289 real_inf (&real);
11290 return build_real (type, real);
11291 }
11292
11293 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
11294 NULL_TREE if no simplification can be made. */
11295
11296 static tree
11297 fold_builtin_sincos (location_t loc,
11298 tree arg0, tree arg1, tree arg2)
11299 {
11300 tree type;
11301 tree fndecl, call = NULL_TREE;
11302
11303 if (!validate_arg (arg0, REAL_TYPE)
11304 || !validate_arg (arg1, POINTER_TYPE)
11305 || !validate_arg (arg2, POINTER_TYPE))
11306 return NULL_TREE;
11307
11308 type = TREE_TYPE (arg0);
11309
11310 /* Calculate the result when the argument is a constant. */
11311 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
11312 if (fn == END_BUILTINS)
11313 return NULL_TREE;
11314
11315 /* Canonicalize sincos to cexpi. */
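/* That is, sincos (x, s, c) becomes tmp = cexpi (x); *s = __imag tmp;
*c = __real tmp, since cexpi (x) computes cos (x) + i*sin (x). */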
11316 if (TREE_CODE (arg0) == REAL_CST)
11317 {
11318 tree complex_type = build_complex_type (type);
11319 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
11320 }
11321 if (!call)
11322 {
11323 if (!targetm.libc_has_function (function_c99_math_complex, type)
11324 || !builtin_decl_implicit_p (fn))
11325 return NULL_TREE;
11326 fndecl = builtin_decl_explicit (fn);
11327 call = build_call_expr_loc (loc, fndecl, 1, arg0);
11328 call = builtin_save_expr (call);
11329 }
11330
11331 tree ptype = build_pointer_type (type);
11332 arg1 = fold_convert (ptype, arg1);
11333 arg2 = fold_convert (ptype, arg2);
11334 return build2 (COMPOUND_EXPR, void_type_node,
11335 build2 (MODIFY_EXPR, void_type_node,
11336 build_fold_indirect_ref_loc (loc, arg1),
11337 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
11338 build2 (MODIFY_EXPR, void_type_node,
11339 build_fold_indirect_ref_loc (loc, arg2),
11340 fold_build1_loc (loc, REALPART_EXPR, type, call)));
11341 }
11342
11343 /* Fold a call to builtin memcmp with arguments ARG1, ARG2 and LEN.
11344 Return NULL_TREE if no simplification can be made. */
11345
11346 static tree
11347 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11348 {
11349 if (!validate_arg (arg1, POINTER_TYPE)
11350 || !validate_arg (arg2, POINTER_TYPE)
11351 || !validate_arg (len, INTEGER_TYPE))
11352 return NULL_TREE;
11353
11354 /* If the LEN parameter is zero, return zero. */
11355 if (integer_zerop (len))
11356 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11357 arg1, arg2);
11358
11359 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11360 if (operand_equal_p (arg1, arg2, 0))
11361 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11362
11363 /* If the len parameter is one, return an expression corresponding to
11364 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
11365 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11366 {
11367 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11368 tree cst_uchar_ptr_node
11369 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11370
11371 tree ind1
11372 = fold_convert_loc (loc, integer_type_node,
11373 build1 (INDIRECT_REF, cst_uchar_node,
11374 fold_convert_loc (loc,
11375 cst_uchar_ptr_node,
11376 arg1)));
11377 tree ind2
11378 = fold_convert_loc (loc, integer_type_node,
11379 build1 (INDIRECT_REF, cst_uchar_node,
11380 fold_convert_loc (loc,
11381 cst_uchar_ptr_node,
11382 arg2)));
11383 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11384 }
11385
11386 return NULL_TREE;
11387 }
11388
11389 /* Fold a call to builtin isascii with argument ARG. */
11390
11391 static tree
11392 fold_builtin_isascii (location_t loc, tree arg)
11393 {
11394 if (!validate_arg (arg, INTEGER_TYPE))
11395 return NULL_TREE;
11396 else
11397 {
11398 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
11399 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11400 build_int_cst (integer_type_node,
11401 ~ (unsigned HOST_WIDE_INT) 0x7f));
11402 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11403 arg, integer_zero_node);
11404 }
11405 }
11406
11407 /* Fold a call to builtin toascii with argument ARG. */
11408
11409 static tree
11410 fold_builtin_toascii (location_t loc, tree arg)
11411 {
11412 if (!validate_arg (arg, INTEGER_TYPE))
11413 return NULL_TREE;
11414
11415 /* Transform toascii(c) -> (c & 0x7f). */
11416 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11417 build_int_cst (integer_type_node, 0x7f));
11418 }
11419
11420 /* Fold a call to builtin isdigit with argument ARG. */
11421
11422 static tree
11423 fold_builtin_isdigit (location_t loc, tree arg)
11424 {
11425 if (!validate_arg (arg, INTEGER_TYPE))
11426 return NULL_TREE;
11427 else
11428 {
11429 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
11430 /* According to the C standard, isdigit is unaffected by locale.
11431 However, it definitely is affected by the target character set. */
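/* The unsigned subtraction lets a single comparison check both bounds:
values of c below '0' wrap around to large unsigned values and therefore
fail the <= 9 test. */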
11432 unsigned HOST_WIDE_INT target_digit0
11433 = lang_hooks.to_target_charset ('0');
11434
11435 if (target_digit0 == 0)
11436 return NULL_TREE;
11437
11438 arg = fold_convert_loc (loc, unsigned_type_node, arg);
11439 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11440 build_int_cst (unsigned_type_node, target_digit0));
11441 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11442 build_int_cst (unsigned_type_node, 9));
11443 }
11444 }
11445
11446 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
11447
11448 static tree
11449 fold_builtin_fabs (location_t loc, tree arg, tree type)
11450 {
11451 if (!validate_arg (arg, REAL_TYPE))
11452 return NULL_TREE;
11453
11454 arg = fold_convert_loc (loc, type, arg);
11455 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11456 }
11457
11458 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
11459
11460 static tree
11461 fold_builtin_abs (location_t loc, tree arg, tree type)
11462 {
11463 if (!validate_arg (arg, INTEGER_TYPE))
11464 return NULL_TREE;
11465
11466 arg = fold_convert_loc (loc, type, arg);
11467 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11468 }
11469
11470 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11471
11472 static tree
11473 fold_builtin_carg (location_t loc, tree arg, tree type)
11474 {
11475 if (validate_arg (arg, COMPLEX_TYPE)
11476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11477 {
11478 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11479
11480 if (atan2_fn)
11481 {
11482 tree new_arg = builtin_save_expr (arg);
11483 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11484 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11485 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11486 }
11487 }
11488
11489 return NULL_TREE;
11490 }
11491
11492 /* Fold a call to builtin frexp; we can assume the base is 2. */
11493
11494 static tree
11495 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11496 {
11497 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11498 return NULL_TREE;
11499
11500 STRIP_NOPS (arg0);
11501
11502 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11503 return NULL_TREE;
11504
11505 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11506
11507 /* Proceed if a valid pointer type was passed in. */
11508 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11509 {
11510 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11511 tree frac, exp;
11512
11513 switch (value->cl)
11514 {
11515 case rvc_zero:
11516 /* For +-0, return (*exp = 0, +-0). */
11517 exp = integer_zero_node;
11518 frac = arg0;
11519 break;
11520 case rvc_nan:
11521 case rvc_inf:
11522 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
11523 return omit_one_operand_loc (loc, rettype, arg0, arg1);
11524 case rvc_normal:
11525 {
11526 /* Since the frexp function always expects base 2, and in
11527 GCC normalized significands are already in the range
11528 [0.5, 1.0), we have exactly what frexp wants. */
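/* For example, frexp (8.0, &e) folds to (e = 4, 0.5),
since 8.0 == 0.5 * 2^4. */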
11529 REAL_VALUE_TYPE frac_rvt = *value;
11530 SET_REAL_EXP (&frac_rvt, 0);
11531 frac = build_real (rettype, frac_rvt);
11532 exp = build_int_cst (integer_type_node, REAL_EXP (value));
11533 }
11534 break;
11535 default:
11536 gcc_unreachable ();
11537 }
11538
11539 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
11540 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11541 TREE_SIDE_EFFECTS (arg1) = 1;
11542 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11543 }
11544
11545 return NULL_TREE;
11546 }
11547
11548 /* Fold a call to builtin modf. */
11549
11550 static tree
11551 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11552 {
11553 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11554 return NULL_TREE;
11555
11556 STRIP_NOPS (arg0);
11557
11558 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11559 return NULL_TREE;
11560
11561 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11562
11563 /* Proceed if a valid pointer type was passed in. */
11564 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11565 {
11566 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11567 REAL_VALUE_TYPE trunc, frac;
11568
11569 switch (value->cl)
11570 {
11571 case rvc_nan:
11572 case rvc_zero:
11573 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
11574 trunc = frac = *value;
11575 break;
11576 case rvc_inf:
11577 /* For +-Inf, return (*arg1 = arg0, +-0). */
11578 frac = dconst0;
11579 frac.sign = value->sign;
11580 trunc = *value;
11581 break;
11582 case rvc_normal:
11583 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
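/* For example, modf (2.5, &i) folds to (i = 2.0, 0.5). */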
11584 real_trunc (&trunc, VOIDmode, value);
11585 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11586 /* If the original number was negative and already
11587 integral, then the fractional part is -0.0. */
11588 if (value->sign && frac.cl == rvc_zero)
11589 frac.sign = value->sign;
11590 break;
11591 }
11592
11593 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11594 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11595 build_real (rettype, trunc));
11596 TREE_SIDE_EFFECTS (arg1) = 1;
11597 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11598 build_real (rettype, frac));
11599 }
11600
11601 return NULL_TREE;
11602 }
11603
11604 /* Given a location LOC, an interclass builtin function decl FNDECL
11605 and its single argument ARG, return a folded expression computing
11606 the same, or NULL_TREE if we either couldn't or didn't want to fold
11607 (the latter happens if there's an RTL instruction available). */
11608
11609 static tree
11610 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11611 {
11612 machine_mode mode;
11613
11614 if (!validate_arg (arg, REAL_TYPE))
11615 return NULL_TREE;
11616
11617 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11618 return NULL_TREE;
11619
11620 mode = TYPE_MODE (TREE_TYPE (arg));
11621
11622 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11623
11624 /* If there is no optab, try generic code. */
11625 switch (DECL_FUNCTION_CODE (fndecl))
11626 {
11627 tree result;
11628
11629 CASE_FLT_FN (BUILT_IN_ISINF):
11630 {
11631 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
11632 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11633 tree type = TREE_TYPE (arg);
11634 REAL_VALUE_TYPE r;
11635 char buf[128];
11636
11637 if (is_ibm_extended)
11638 {
11639 /* NaN and Inf are encoded in the high-order double value
11640 only. The low-order value is not significant. */
11641 type = double_type_node;
11642 mode = DFmode;
11643 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11644 }
11645 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11646 real_from_string (&r, buf);
11647 result = build_call_expr (isgr_fn, 2,
11648 fold_build1_loc (loc, ABS_EXPR, type, arg),
11649 build_real (type, r));
11650 return result;
11651 }
11652 CASE_FLT_FN (BUILT_IN_FINITE):
11653 case BUILT_IN_ISFINITE:
11654 {
11655 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
11656 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11657 tree type = TREE_TYPE (arg);
11658 REAL_VALUE_TYPE r;
11659 char buf[128];
11660
11661 if (is_ibm_extended)
11662 {
11663 /* NaN and Inf are encoded in the high-order double value
11664 only. The low-order value is not significant. */
11665 type = double_type_node;
11666 mode = DFmode;
11667 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11668 }
11669 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11670 real_from_string (&r, buf);
11671 result = build_call_expr (isle_fn, 2,
11672 fold_build1_loc (loc, ABS_EXPR, type, arg),
11673 build_real (type, r));
11674 /*result = fold_build2_loc (loc, UNGT_EXPR,
11675 TREE_TYPE (TREE_TYPE (fndecl)),
11676 fold_build1_loc (loc, ABS_EXPR, type, arg),
11677 build_real (type, r));
11678 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11679 TREE_TYPE (TREE_TYPE (fndecl)),
11680 result);*/
11681 return result;
11682 }
11683 case BUILT_IN_ISNORMAL:
11684 {
11685 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11686 islessequal(fabs(x),DBL_MAX). */
11687 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11688 tree type = TREE_TYPE (arg);
11689 tree orig_arg, max_exp, min_exp;
11690 machine_mode orig_mode = mode;
11691 REAL_VALUE_TYPE rmax, rmin;
11692 char buf[128];
11693
11694 orig_arg = arg = builtin_save_expr (arg);
11695 if (is_ibm_extended)
11696 {
11697 /* Use double to test the normal range of IBM extended
11698 precision. Emin for IBM extended precision is
11699 different to emin for IEEE double, being 53 higher
11700 since the low double exponent is at least 53 lower
11701 than the high double exponent. */
11702 type = double_type_node;
11703 mode = DFmode;
11704 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11705 }
11706 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11707
11708 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11709 real_from_string (&rmax, buf);
11710 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11711 real_from_string (&rmin, buf);
11712 max_exp = build_real (type, rmax);
11713 min_exp = build_real (type, rmin);
11714
11715 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11716 if (is_ibm_extended)
11717 {
11718 /* Testing the high end of the range is done just using
11719 the high double, using the same test as isfinite().
11720 For the subnormal end of the range we first test the
11721 high double, then if its magnitude is equal to the
11722 limit of 0x1p-969, we test whether the low double is
11723 non-zero and opposite sign to the high double. */
11724 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11725 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11726 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11727 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11728 arg, min_exp);
11729 tree as_complex = build1 (VIEW_CONVERT_EXPR,
11730 complex_double_type_node, orig_arg);
11731 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11732 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11733 tree zero = build_real (type, dconst0);
11734 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11735 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11736 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11737 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11738 fold_build3 (COND_EXPR,
11739 integer_type_node,
11740 hilt, logt, lolt));
11741 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11742 eq_min, ok_lo);
11743 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11744 gt_min, eq_min);
11745 }
11746 else
11747 {
11748 tree const isge_fn
11749 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11750 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11751 }
11752 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11753 max_exp, min_exp);
11754 return result;
11755 }
11756 default:
11757 break;
11758 }
11759
11760 return NULL_TREE;
11761 }
11762
11763 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11764 ARG is the argument for the call. */
11765
11766 static tree
11767 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11768 {
11769 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11770
11771 if (!validate_arg (arg, REAL_TYPE))
11772 return NULL_TREE;
11773
11774 switch (builtin_index)
11775 {
11776 case BUILT_IN_ISINF:
11777 if (tree_expr_infinite_p (arg))
11778 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11779 if (!tree_expr_maybe_infinite_p (arg))
11780 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11781 return NULL_TREE;
11782
11783 case BUILT_IN_ISINF_SIGN:
11784 {
11785 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11786 /* In a boolean context, GCC will fold the inner COND_EXPR to
11787 1. So e.g. "if (isinf_sign(x))" would be folded to just
11788 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11789 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11790 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11791 tree tmp = NULL_TREE;
11792
11793 arg = builtin_save_expr (arg);
11794
11795 if (signbit_fn && isinf_fn)
11796 {
11797 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11798 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11799
11800 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11801 signbit_call, integer_zero_node);
11802 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11803 isinf_call, integer_zero_node);
11804
11805 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11806 integer_minus_one_node, integer_one_node);
11807 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11808 isinf_call, tmp,
11809 integer_zero_node);
11810 }
11811
11812 return tmp;
11813 }
11814
11815 case BUILT_IN_ISFINITE:
11816 if (tree_expr_finite_p (arg))
11817 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11818 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11819 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11820 return NULL_TREE;
11821
11822 case BUILT_IN_ISNAN:
11823 if (tree_expr_nan_p (arg))
11824 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11825 if (!tree_expr_maybe_nan_p (arg))
11826 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11827
11828 {
11829 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11830 if (is_ibm_extended)
11831 {
11832 /* NaN and Inf are encoded in the high-order double value
11833 only. The low-order value is not significant. */
11834 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11835 }
11836 }
11837 arg = builtin_save_expr (arg);
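/* Only a NaN compares unordered with itself, so isnan (x) becomes
x UNORDERED x. */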
11838 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11839
11840 default:
11841 gcc_unreachable ();
11842 }
11843 }
11844
11845 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11846 This builtin will generate code to return the appropriate floating
11847 point classification depending on the value of the floating point
11848 number passed in. The possible return values must be supplied as
11849 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11850 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
11851 one floating point argument, which is "type generic". */
11852
11853 static tree
11854 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11855 {
11856 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11857 arg, type, res, tmp;
11858 machine_mode mode;
11859 REAL_VALUE_TYPE r;
11860 char buf[128];
11861
11862 /* Verify the required arguments in the original call. */
11863 if (nargs != 6
11864 || !validate_arg (args[0], INTEGER_TYPE)
11865 || !validate_arg (args[1], INTEGER_TYPE)
11866 || !validate_arg (args[2], INTEGER_TYPE)
11867 || !validate_arg (args[3], INTEGER_TYPE)
11868 || !validate_arg (args[4], INTEGER_TYPE)
11869 || !validate_arg (args[5], REAL_TYPE))
11870 return NULL_TREE;
11871
11872 fp_nan = args[0];
11873 fp_infinite = args[1];
11874 fp_normal = args[2];
11875 fp_subnormal = args[3];
11876 fp_zero = args[4];
11877 arg = args[5];
11878 type = TREE_TYPE (arg);
11879 mode = TYPE_MODE (type);
11880 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11881
11882 /* fpclassify(x) ->
11883 isnan(x) ? FP_NAN :
11884 (fabs(x) == Inf ? FP_INFINITE :
11885 (fabs(x) >= DBL_MIN ? FP_NORMAL :
11886 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
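/* The nested conditional below is built from the innermost test outwards:
the FP_ZERO/FP_SUBNORMAL choice first, then FP_NORMAL, then FP_INFINITE,
and finally the NaN test wrapping the whole expression. */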
11887
11888 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11889 build_real (type, dconst0));
11890 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11891 tmp, fp_zero, fp_subnormal);
11892
11893 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11894 real_from_string (&r, buf);
11895 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11896 arg, build_real (type, r));
11897 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11898
11899 if (tree_expr_maybe_infinite_p (arg))
11900 {
11901 real_inf (&r);
11902 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11903 build_real (type, r));
11904 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11905 fp_infinite, res);
11906 }
11907
11908 if (tree_expr_maybe_nan_p (arg))
11909 {
11910 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11911 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11912 }
11913
11914 return res;
11915 }
11916
11917 /* Fold a call to an unordered comparison function such as
11918 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
11919 being called and ARG0 and ARG1 are the arguments for the call.
11920 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11921 the opposite of the desired result. UNORDERED_CODE is used
11922 for modes that can hold NaNs and ORDERED_CODE is used for
11923 the rest. */
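/* For __builtin_isgreater, for instance, the caller passes UNLE_EXPR and
LE_EXPR, so the call folds to !(x unle y) when a NaN is possible and to
!(x <= y) otherwise. */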
11924
11925 static tree
11926 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11927 enum tree_code unordered_code,
11928 enum tree_code ordered_code)
11929 {
11930 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11931 enum tree_code code;
11932 tree type0, type1;
11933 enum tree_code code0, code1;
11934 tree cmp_type = NULL_TREE;
11935
11936 type0 = TREE_TYPE (arg0);
11937 type1 = TREE_TYPE (arg1);
11938
11939 code0 = TREE_CODE (type0);
11940 code1 = TREE_CODE (type1);
11941
11942 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11943 /* Choose the wider of two real types. */
11944 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11945 ? type0 : type1;
11946 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11947 cmp_type = type0;
11948 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11949 cmp_type = type1;
11950
11951 arg0 = fold_convert_loc (loc, cmp_type, arg0);
11952 arg1 = fold_convert_loc (loc, cmp_type, arg1);
11953
11954 if (unordered_code == UNORDERED_EXPR)
11955 {
11956 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11957 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11958 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11959 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11960 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11961 }
11962
11963 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11964 ? unordered_code : ordered_code;
11965 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11966 fold_build2_loc (loc, code, type, arg0, arg1));
11967 }
11968
11969 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11970 arithmetic if it can never overflow, or into internal functions that
11971 return both the result of the arithmetic and a boolean overflow flag in
11972 a complex integer result, or some other check for overflow.
11973 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11974 checking part of that. */
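
/* A rough illustration of the trees built below: with non-constant
   operands
     __builtin_add_overflow (a, b, &r)
   is folded into the equivalent of
     t = .ADD_OVERFLOW (a, b);	   <-- IFN_ADD_OVERFLOW, complex result
     r = REALPART_EXPR <t>;
     IMAGPART_EXPR <t> != 0	   <-- value of the COMPOUND_EXPR
   while a fully constant __builtin_add_overflow_p (1, 2, (int) 0)
   folds straight to boolean_false_node.  */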
11975
11976 static tree
11977 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11978 tree arg0, tree arg1, tree arg2)
11979 {
11980 enum internal_fn ifn = IFN_LAST;
11981 /* The code of the expression corresponding to the built-in. */
11982 enum tree_code opcode = ERROR_MARK;
11983 bool ovf_only = false;
11984
11985 switch (fcode)
11986 {
11987 case BUILT_IN_ADD_OVERFLOW_P:
11988 ovf_only = true;
11989 /* FALLTHRU */
11990 case BUILT_IN_ADD_OVERFLOW:
11991 case BUILT_IN_SADD_OVERFLOW:
11992 case BUILT_IN_SADDL_OVERFLOW:
11993 case BUILT_IN_SADDLL_OVERFLOW:
11994 case BUILT_IN_UADD_OVERFLOW:
11995 case BUILT_IN_UADDL_OVERFLOW:
11996 case BUILT_IN_UADDLL_OVERFLOW:
11997 opcode = PLUS_EXPR;
11998 ifn = IFN_ADD_OVERFLOW;
11999 break;
12000 case BUILT_IN_SUB_OVERFLOW_P:
12001 ovf_only = true;
12002 /* FALLTHRU */
12003 case BUILT_IN_SUB_OVERFLOW:
12004 case BUILT_IN_SSUB_OVERFLOW:
12005 case BUILT_IN_SSUBL_OVERFLOW:
12006 case BUILT_IN_SSUBLL_OVERFLOW:
12007 case BUILT_IN_USUB_OVERFLOW:
12008 case BUILT_IN_USUBL_OVERFLOW:
12009 case BUILT_IN_USUBLL_OVERFLOW:
12010 opcode = MINUS_EXPR;
12011 ifn = IFN_SUB_OVERFLOW;
12012 break;
12013 case BUILT_IN_MUL_OVERFLOW_P:
12014 ovf_only = true;
12015 /* FALLTHRU */
12016 case BUILT_IN_MUL_OVERFLOW:
12017 case BUILT_IN_SMUL_OVERFLOW:
12018 case BUILT_IN_SMULL_OVERFLOW:
12019 case BUILT_IN_SMULLL_OVERFLOW:
12020 case BUILT_IN_UMUL_OVERFLOW:
12021 case BUILT_IN_UMULL_OVERFLOW:
12022 case BUILT_IN_UMULLL_OVERFLOW:
12023 opcode = MULT_EXPR;
12024 ifn = IFN_MUL_OVERFLOW;
12025 break;
12026 default:
12027 gcc_unreachable ();
12028 }
12029
12030 /* For the "generic" overloads, the first two arguments can have different
12031 types and the last argument determines the target type to use to check
12032 for overflow. The arguments of the other overloads all have the same
12033 type. */
12034 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
12035
12036 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
12037 arguments are constant, attempt to fold the built-in call into a constant
12038 expression indicating whether or not it detected an overflow. */
12039 if (ovf_only
12040 && TREE_CODE (arg0) == INTEGER_CST
12041 && TREE_CODE (arg1) == INTEGER_CST)
12042 /* Perform the computation in the target type and check for overflow. */
12043 return omit_one_operand_loc (loc, boolean_type_node,
12044 arith_overflowed_p (opcode, type, arg0, arg1)
12045 ? boolean_true_node : boolean_false_node,
12046 arg2);
12047
12048 tree intres, ovfres;
12049 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
12050 {
12051 intres = fold_binary_loc (loc, opcode, type,
12052 fold_convert_loc (loc, type, arg0),
12053 fold_convert_loc (loc, type, arg1));
12054 if (TREE_OVERFLOW (intres))
12055 intres = drop_tree_overflow (intres);
12056 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
12057 ? boolean_true_node : boolean_false_node);
12058 }
12059 else
12060 {
12061 tree ctype = build_complex_type (type);
12062 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
12063 arg0, arg1);
12064 tree tgt = save_expr (call);
12065 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
12066 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
12067 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
12068 }
12069
12070 if (ovf_only)
12071 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
12072
12073 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
12074 tree store
12075 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
12076 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
12077 }
12078
12079 /* Fold a call to __builtin_FILE to a constant string. */
12080
12081 static inline tree
12082 fold_builtin_FILE (location_t loc)
12083 {
12084 if (const char *fname = LOCATION_FILE (loc))
12085 {
12086 /* The documentation says this builtin is equivalent to the preprocessor
12087 __FILE__ macro so it appears appropriate to use the same file prefix
12088 mappings. */
12089 fname = remap_macro_filename (fname);
12090 return build_string_literal (strlen (fname) + 1, fname);
12091 }
12092
12093 return build_string_literal (1, "");
12094 }
12095
12096 /* Fold a call to __builtin_FUNCTION to a constant string. */
12097
12098 static inline tree
12099 fold_builtin_FUNCTION ()
12100 {
12101 const char *name = "";
12102
12103 if (current_function_decl)
12104 name = lang_hooks.decl_printable_name (current_function_decl, 0);
12105
12106 return build_string_literal (strlen (name) + 1, name);
12107 }
12108
12109 /* Fold a call to __builtin_LINE to an integer constant. */
12110
12111 static inline tree
12112 fold_builtin_LINE (location_t loc, tree type)
12113 {
12114 return build_int_cst (type, LOCATION_LINE (loc));
12115 }
12116
12117 /* Fold a call to built-in function FNDECL with 0 arguments.
12118 This function returns NULL_TREE if no simplification was possible. */
12119
12120 static tree
12121 fold_builtin_0 (location_t loc, tree fndecl)
12122 {
12123 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12124 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12125 switch (fcode)
12126 {
12127 case BUILT_IN_FILE:
12128 return fold_builtin_FILE (loc);
12129
12130 case BUILT_IN_FUNCTION:
12131 return fold_builtin_FUNCTION ();
12132
12133 case BUILT_IN_LINE:
12134 return fold_builtin_LINE (loc, type);
12135
12136 CASE_FLT_FN (BUILT_IN_INF):
12137 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
12138 case BUILT_IN_INFD32:
12139 case BUILT_IN_INFD64:
12140 case BUILT_IN_INFD128:
12141 return fold_builtin_inf (loc, type, true);
12142
12143 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
12144 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
12145 return fold_builtin_inf (loc, type, false);
12146
12147 case BUILT_IN_CLASSIFY_TYPE:
12148 return fold_builtin_classify_type (NULL_TREE);
12149
12150 default:
12151 break;
12152 }
12153 return NULL_TREE;
12154 }
12155
12156 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
12157 This function returns NULL_TREE if no simplification was possible. */
12158
12159 static tree
12160 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
12161 {
12162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12163 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12164
12165 if (TREE_CODE (arg0) == ERROR_MARK)
12166 return NULL_TREE;
12167
12168 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
12169 return ret;
12170
12171 switch (fcode)
12172 {
12173 case BUILT_IN_CONSTANT_P:
12174 {
12175 tree val = fold_builtin_constant_p (arg0);
12176
12177 /* Gimplification will pull the CALL_EXPR for the builtin out of
12178 an if condition. When not optimizing, we'll not CSE it back.
12179 To avoid regressions such as link errors, return false now. */
12180 if (!val && !optimize)
12181 val = integer_zero_node;
12182
12183 return val;
12184 }
12185
12186 case BUILT_IN_CLASSIFY_TYPE:
12187 return fold_builtin_classify_type (arg0);
12188
12189 case BUILT_IN_STRLEN:
12190 return fold_builtin_strlen (loc, expr, type, arg0);
12191
12192 CASE_FLT_FN (BUILT_IN_FABS):
12193 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12194 case BUILT_IN_FABSD32:
12195 case BUILT_IN_FABSD64:
12196 case BUILT_IN_FABSD128:
12197 return fold_builtin_fabs (loc, arg0, type);
12198
12199 case BUILT_IN_ABS:
12200 case BUILT_IN_LABS:
12201 case BUILT_IN_LLABS:
12202 case BUILT_IN_IMAXABS:
12203 return fold_builtin_abs (loc, arg0, type);
12204
12205 CASE_FLT_FN (BUILT_IN_CONJ):
12206 if (validate_arg (arg0, COMPLEX_TYPE)
12207 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12208 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
12209 break;
12210
12211 CASE_FLT_FN (BUILT_IN_CREAL):
12212 if (validate_arg (arg0, COMPLEX_TYPE)
12213 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12214 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
12215 break;
12216
12217 CASE_FLT_FN (BUILT_IN_CIMAG):
12218 if (validate_arg (arg0, COMPLEX_TYPE)
12219 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12220 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
12221 break;
12222
12223 CASE_FLT_FN (BUILT_IN_CARG):
12224 return fold_builtin_carg (loc, arg0, type);
12225
12226 case BUILT_IN_ISASCII:
12227 return fold_builtin_isascii (loc, arg0);
12228
12229 case BUILT_IN_TOASCII:
12230 return fold_builtin_toascii (loc, arg0);
12231
12232 case BUILT_IN_ISDIGIT:
12233 return fold_builtin_isdigit (loc, arg0);
12234
12235 CASE_FLT_FN (BUILT_IN_FINITE):
12236 case BUILT_IN_FINITED32:
12237 case BUILT_IN_FINITED64:
12238 case BUILT_IN_FINITED128:
12239 case BUILT_IN_ISFINITE:
12240 {
12241 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
12242 if (ret)
12243 return ret;
12244 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12245 }
12246
12247 CASE_FLT_FN (BUILT_IN_ISINF):
12248 case BUILT_IN_ISINFD32:
12249 case BUILT_IN_ISINFD64:
12250 case BUILT_IN_ISINFD128:
12251 {
12252 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
12253 if (ret)
12254 return ret;
12255 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12256 }
12257
12258 case BUILT_IN_ISNORMAL:
12259 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12260
12261 case BUILT_IN_ISINF_SIGN:
12262 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
12263
12264 CASE_FLT_FN (BUILT_IN_ISNAN):
12265 case BUILT_IN_ISNAND32:
12266 case BUILT_IN_ISNAND64:
12267 case BUILT_IN_ISNAND128:
12268 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
12269
12270 case BUILT_IN_FREE:
12271 if (integer_zerop (arg0))
12272 return build_empty_stmt (loc);
12273 break;
12274
12275 default:
12276 break;
12277 }
12278
12279 return NULL_TREE;
12280
12281 }
12282
12283 /* Folds a call EXPR (which may be null) to built-in function FNDECL
12284 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
12285 if no simplification was possible. */
12286
12287 static tree
12288 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
12289 {
12290 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12292
12293 if (TREE_CODE (arg0) == ERROR_MARK
12294 || TREE_CODE (arg1) == ERROR_MARK)
12295 return NULL_TREE;
12296
12297 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
12298 return ret;
12299
12300 switch (fcode)
12301 {
12302 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
12303 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
12304 if (validate_arg (arg0, REAL_TYPE)
12305 && validate_arg (arg1, POINTER_TYPE))
12306 return do_mpfr_lgamma_r (arg0, arg1, type);
12307 break;
12308
12309 CASE_FLT_FN (BUILT_IN_FREXP):
12310 return fold_builtin_frexp (loc, arg0, arg1, type);
12311
12312 CASE_FLT_FN (BUILT_IN_MODF):
12313 return fold_builtin_modf (loc, arg0, arg1, type);
12314
12315 case BUILT_IN_STRSPN:
12316 return fold_builtin_strspn (loc, expr, arg0, arg1);
12317
12318 case BUILT_IN_STRCSPN:
12319 return fold_builtin_strcspn (loc, expr, arg0, arg1);
12320
12321 case BUILT_IN_STRPBRK:
12322 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
12323
12324 case BUILT_IN_EXPECT:
12325 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
12326
12327 case BUILT_IN_ISGREATER:
12328 return fold_builtin_unordered_cmp (loc, fndecl,
12329 arg0, arg1, UNLE_EXPR, LE_EXPR);
12330 case BUILT_IN_ISGREATEREQUAL:
12331 return fold_builtin_unordered_cmp (loc, fndecl,
12332 arg0, arg1, UNLT_EXPR, LT_EXPR);
12333 case BUILT_IN_ISLESS:
12334 return fold_builtin_unordered_cmp (loc, fndecl,
12335 arg0, arg1, UNGE_EXPR, GE_EXPR);
12336 case BUILT_IN_ISLESSEQUAL:
12337 return fold_builtin_unordered_cmp (loc, fndecl,
12338 arg0, arg1, UNGT_EXPR, GT_EXPR);
12339 case BUILT_IN_ISLESSGREATER:
12340 return fold_builtin_unordered_cmp (loc, fndecl,
12341 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
12342 case BUILT_IN_ISUNORDERED:
12343 return fold_builtin_unordered_cmp (loc, fndecl,
12344 arg0, arg1, UNORDERED_EXPR,
12345 NOP_EXPR);
12346
12347 /* We do the folding for va_start in the expander. */
12348 case BUILT_IN_VA_START:
12349 break;
12350
12351 case BUILT_IN_OBJECT_SIZE:
12352 return fold_builtin_object_size (arg0, arg1);
12353
12354 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12355 return fold_builtin_atomic_always_lock_free (arg0, arg1);
12356
12357 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12358 return fold_builtin_atomic_is_lock_free (arg0, arg1);
12359
12360 default:
12361 break;
12362 }
12363 return NULL_TREE;
12364 }
12365
12366 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12367 and ARG2.
12368 This function returns NULL_TREE if no simplification was possible. */
12369
12370 static tree
12371 fold_builtin_3 (location_t loc, tree fndecl,
12372 tree arg0, tree arg1, tree arg2)
12373 {
12374 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12375 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12376
12377 if (TREE_CODE (arg0) == ERROR_MARK
12378 || TREE_CODE (arg1) == ERROR_MARK
12379 || TREE_CODE (arg2) == ERROR_MARK)
12380 return NULL_TREE;
12381
12382 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12383 arg0, arg1, arg2))
12384 return ret;
12385
12386 switch (fcode)
12387 {
12388
12389 CASE_FLT_FN (BUILT_IN_SINCOS):
12390 return fold_builtin_sincos (loc, arg0, arg1, arg2);
12391
12392 CASE_FLT_FN (BUILT_IN_REMQUO):
12393 if (validate_arg (arg0, REAL_TYPE)
12394 && validate_arg (arg1, REAL_TYPE)
12395 && validate_arg (arg2, POINTER_TYPE))
12396 return do_mpfr_remquo (arg0, arg1, arg2);
12397 break;
12398
12399 case BUILT_IN_MEMCMP:
12400 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12401
12402 case BUILT_IN_EXPECT:
12403 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12404
12405 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12406 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12407
12408 case BUILT_IN_ADD_OVERFLOW:
12409 case BUILT_IN_SUB_OVERFLOW:
12410 case BUILT_IN_MUL_OVERFLOW:
12411 case BUILT_IN_ADD_OVERFLOW_P:
12412 case BUILT_IN_SUB_OVERFLOW_P:
12413 case BUILT_IN_MUL_OVERFLOW_P:
12414 case BUILT_IN_SADD_OVERFLOW:
12415 case BUILT_IN_SADDL_OVERFLOW:
12416 case BUILT_IN_SADDLL_OVERFLOW:
12417 case BUILT_IN_SSUB_OVERFLOW:
12418 case BUILT_IN_SSUBL_OVERFLOW:
12419 case BUILT_IN_SSUBLL_OVERFLOW:
12420 case BUILT_IN_SMUL_OVERFLOW:
12421 case BUILT_IN_SMULL_OVERFLOW:
12422 case BUILT_IN_SMULLL_OVERFLOW:
12423 case BUILT_IN_UADD_OVERFLOW:
12424 case BUILT_IN_UADDL_OVERFLOW:
12425 case BUILT_IN_UADDLL_OVERFLOW:
12426 case BUILT_IN_USUB_OVERFLOW:
12427 case BUILT_IN_USUBL_OVERFLOW:
12428 case BUILT_IN_USUBLL_OVERFLOW:
12429 case BUILT_IN_UMUL_OVERFLOW:
12430 case BUILT_IN_UMULL_OVERFLOW:
12431 case BUILT_IN_UMULLL_OVERFLOW:
12432 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12433
12434 default:
12435 break;
12436 }
12437 return NULL_TREE;
12438 }
12439
12440 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12441 ARGS is an array of NARGS arguments. IGNORE is true if the result
12442 of the function call is ignored. This function returns NULL_TREE
12443 if no simplification was possible. */
12444
12445 static tree
12446 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12447 int nargs, bool)
12448 {
12449 tree ret = NULL_TREE;
12450
12451 switch (nargs)
12452 {
12453 case 0:
12454 ret = fold_builtin_0 (loc, fndecl);
12455 break;
12456 case 1:
12457 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12458 break;
12459 case 2:
12460 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12461 break;
12462 case 3:
12463 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12464 break;
12465 default:
12466 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12467 break;
12468 }
12469 if (ret)
12470 {
12471 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12472 SET_EXPR_LOCATION (ret, loc);
12473 return ret;
12474 }
12475 return NULL_TREE;
12476 }
12477
12478 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12479 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12480 of arguments in ARGS to be omitted. OLDNARGS is the number of
12481 elements in ARGS. */
12482
12483 static tree
12484 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12485 int skip, tree fndecl, int n, va_list newargs)
12486 {
12487 int nargs = oldnargs - skip + n;
12488 tree *buffer;
12489
12490 if (n > 0)
12491 {
12492 int i, j;
12493
12494 buffer = XALLOCAVEC (tree, nargs);
12495 for (i = 0; i < n; i++)
12496 buffer[i] = va_arg (newargs, tree);
12497 for (j = skip; j < oldnargs; j++, i++)
12498 buffer[i] = args[j];
12499 }
12500 else
12501 buffer = args + skip;
12502
12503 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12504 }
12505
12506 /* Return true if FNDECL shouldn't be folded right now.
12507 If a built-in function has an inline attribute always_inline
12508 wrapper, defer folding it until after always_inline functions have
12509 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12510 might not be performed. */
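
/* The kind of wrapper this is meant to protect is the always_inline
   fortified function glibc provides under -D_FORTIFY_SOURCE; a rough
   sketch (for illustration only, not the exact glibc code) is

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     void *
     memcpy (void *__dest, const void *__src, size_t __len)
     {
       return __builtin___memcpy_chk (__dest, __src, __len,
				      __builtin_object_size (__dest, 0));
     }

   Folding the memcpy call before the wrapper is inlined would lose
   the redirection to the _chk variant and hence the size check.  */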
12511
12512 bool
12513 avoid_folding_inline_builtin (tree fndecl)
12514 {
12515 return (DECL_DECLARED_INLINE_P (fndecl)
12516 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12517 && cfun
12518 && !cfun->always_inline_functions_inlined
12519 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
12520 }
12521
12522 /* A wrapper function for builtin folding that prevents warnings for
12523 "statement without effect" and the like, caused by removing the
12524 call node earlier than the warning is generated. */
12525
12526 tree
12527 fold_call_expr (location_t loc, tree exp, bool ignore)
12528 {
12529 tree ret = NULL_TREE;
12530 tree fndecl = get_callee_fndecl (exp);
12531 if (fndecl && fndecl_built_in_p (fndecl)
12532 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12533 yet. Defer folding until we see all the arguments
12534 (after inlining). */
12535 && !CALL_EXPR_VA_ARG_PACK (exp))
12536 {
12537 int nargs = call_expr_nargs (exp);
12538
12539 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12540 instead the last argument is __builtin_va_arg_pack (). Defer folding
12541 even in that case, until arguments are finalized. */
12542 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12543 {
12544 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12545 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12546 return NULL_TREE;
12547 }
12548
12549 if (avoid_folding_inline_builtin (fndecl))
12550 return NULL_TREE;
12551
12552 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12553 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12554 CALL_EXPR_ARGP (exp), ignore);
12555 else
12556 {
12557 tree *args = CALL_EXPR_ARGP (exp);
12558 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12559 if (ret)
12560 return ret;
12561 }
12562 }
12563 return NULL_TREE;
12564 }
12565
12566 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12567 N arguments are passed in the array ARGARRAY. Return a folded
12568 expression or NULL_TREE if no simplification was possible. */
12569
12570 tree
12571 fold_builtin_call_array (location_t loc, tree,
12572 tree fn,
12573 int n,
12574 tree *argarray)
12575 {
12576 if (TREE_CODE (fn) != ADDR_EXPR)
12577 return NULL_TREE;
12578
12579 tree fndecl = TREE_OPERAND (fn, 0);
12580 if (TREE_CODE (fndecl) == FUNCTION_DECL
12581 && fndecl_built_in_p (fndecl))
12582 {
12583 /* If last argument is __builtin_va_arg_pack (), arguments to this
12584 function are not finalized yet. Defer folding until they are. */
12585 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12586 {
12587 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12588 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12589 return NULL_TREE;
12590 }
12591 if (avoid_folding_inline_builtin (fndecl))
12592 return NULL_TREE;
12593 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12594 return targetm.fold_builtin (fndecl, n, argarray, false);
12595 else
12596 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12597 }
12598
12599 return NULL_TREE;
12600 }
12601
12602 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12603 along with N new arguments specified as the "..." parameters. SKIP
12604 is the number of arguments in EXP to be omitted. This function is used
12605 to do varargs-to-varargs transformations. */
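
/* Purely to illustrate the mechanics: if EXP represents
     f (a, b, c, d)
   then
     rewrite_call_expr (loc, exp, 2, g_decl, 1, x)
   skips the first two arguments of EXP, prepends the single new
   argument X, and builds the call
     g (x, c, d)
   where G_DECL stands for the FUNCTION_DECL of g (names here are
   hypothetical).  */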
12606
12607 static tree
12608 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12609 {
12610 va_list ap;
12611 tree t;
12612
12613 va_start (ap, n);
12614 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12615 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12616 va_end (ap);
12617
12618 return t;
12619 }
12620
12621 /* Validate a single argument ARG against a tree code CODE representing
12622 a type. Return true when argument is valid. */
12623
12624 static bool
12625 validate_arg (const_tree arg, enum tree_code code)
12626 {
12627 if (!arg)
12628 return false;
12629 else if (code == POINTER_TYPE)
12630 return POINTER_TYPE_P (TREE_TYPE (arg));
12631 else if (code == INTEGER_TYPE)
12632 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12633 return code == TREE_CODE (TREE_TYPE (arg));
12634 }
12635
12636 /* This function validates the types of a function call argument list
12637 against a specified list of tree_codes. If the last specifier is a 0,
12638 that represents an ellipsis; otherwise the last specifier must be a
12639 VOID_TYPE.
12640
12641 This is the GIMPLE version of validate_arglist. Eventually we want to
12642 completely convert builtins.c to work from GIMPLEs and the tree based
12643 validate_arglist will then be removed. */
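
/* A typical use (illustrative) is checking a sincos-style call with
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
			      POINTER_TYPE, VOID_TYPE)
   which succeeds only for exactly three arguments of those types;
   ending the list with 0 instead of VOID_TYPE would instead allow
   arbitrary extra arguments after the ones listed.  */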
12644
12645 bool
12646 validate_gimple_arglist (const gcall *call, ...)
12647 {
12648 enum tree_code code;
12649 bool res = 0;
12650 va_list ap;
12651 const_tree arg;
12652 size_t i;
12653
12654 va_start (ap, call);
12655 i = 0;
12656
12657 do
12658 {
12659 code = (enum tree_code) va_arg (ap, int);
12660 switch (code)
12661 {
12662 case 0:
12663 /* This signifies an ellipsis; any further arguments are all ok. */
12664 res = true;
12665 goto end;
12666 case VOID_TYPE:
12667 /* This signifies an endlink, if no arguments remain, return
12668 true, otherwise return false. */
12669 res = (i == gimple_call_num_args (call));
12670 goto end;
12671 default:
12672 /* If no parameters remain or the parameter's code does not
12673 match the specified code, return false. Otherwise continue
12674 checking any remaining arguments. */
12675 arg = gimple_call_arg (call, i++);
12676 if (!validate_arg (arg, code))
12677 goto end;
12678 break;
12679 }
12680 }
12681 while (1);
12682
12683 /* We need gotos here since we can only have one VA_CLOSE in a
12684 function. */
12685 end: ;
12686 va_end (ap);
12687
12688 return res;
12689 }
12690
12691 /* Default target-specific builtin expander that does nothing. */
12692
12693 rtx
12694 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12695 rtx target ATTRIBUTE_UNUSED,
12696 rtx subtarget ATTRIBUTE_UNUSED,
12697 machine_mode mode ATTRIBUTE_UNUSED,
12698 int ignore ATTRIBUTE_UNUSED)
12699 {
12700 return NULL_RTX;
12701 }
12702
12703 /* Returns true if EXP represents data that would potentially reside
12704 in a readonly section. */
12705
12706 bool
12707 readonly_data_expr (tree exp)
12708 {
12709 STRIP_NOPS (exp);
12710
12711 if (TREE_CODE (exp) != ADDR_EXPR)
12712 return false;
12713
12714 exp = get_base_address (TREE_OPERAND (exp, 0));
12715 if (!exp)
12716 return false;
12717
12718 /* Make sure we call decl_readonly_section only for trees it
12719 can handle (since it returns true for everything it doesn't
12720 understand). */
12721 if (TREE_CODE (exp) == STRING_CST
12722 || TREE_CODE (exp) == CONSTRUCTOR
12723 || (VAR_P (exp) && TREE_STATIC (exp)))
12724 return decl_readonly_section (exp, 0);
12725 else
12726 return false;
12727 }
12728
12729 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12730 to the call, and TYPE is its return type.
12731
12732 Return NULL_TREE if no simplification was possible, otherwise return the
12733 simplified form of the call as a tree.
12734
12735 The simplified form may be a constant or other expression which
12736 computes the same value, but in a more efficient manner (including
12737 calls to other builtin functions).
12738
12739 The call may contain arguments which need to be evaluated, but
12740 which are not useful to determine the result of the call. In
12741 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12742 COMPOUND_EXPR will be an argument which must be evaluated.
12743 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12744 COMPOUND_EXPR in the chain will contain the tree for the simplified
12745 form of the builtin function call. */
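
/* A few illustrative foldings with a constant second argument:
     strpbrk (s, "")    -> a null pointer, still evaluating S,
     strpbrk (s, "c")   -> strchr (s, 'c'), and
     strpbrk ("hello", "lo") -> "hello" + 2, an offset into the
   constant first argument.  */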
12746
12747 static tree
12748 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12749 {
12750 if (!validate_arg (s1, POINTER_TYPE)
12751 || !validate_arg (s2, POINTER_TYPE))
12752 return NULL_TREE;
12753
12754 tree fn;
12755 const char *p1, *p2;
12756
12757 p2 = c_getstr (s2);
12758 if (p2 == NULL)
12759 return NULL_TREE;
12760
12761 p1 = c_getstr (s1);
12762 if (p1 != NULL)
12763 {
12764 const char *r = strpbrk (p1, p2);
12765 tree tem;
12766
12767 if (r == NULL)
12768 return build_int_cst (TREE_TYPE (s1), 0);
12769
12770 /* Return an offset into the constant string argument. */
12771 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12772 return fold_convert_loc (loc, type, tem);
12773 }
12774
12775 if (p2[0] == '\0')
12776 /* strpbrk(x, "") == NULL.
12777 Evaluate and ignore s1 in case it had side-effects. */
12778 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12779
12780 if (p2[1] != '\0')
12781 return NULL_TREE; /* Really call strpbrk. */
12782
12783 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12784 if (!fn)
12785 return NULL_TREE;
12786
12787 /* New argument list transforming strpbrk(s1, s2) to
12788 strchr(s1, s2[0]). */
12789 return build_call_expr_loc (loc, fn, 2, s1,
12790 build_int_cst (integer_type_node, p2[0]));
12791 }
12792
12793 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12794 to the call.
12795
12796 Return NULL_TREE if no simplification was possible, otherwise return the
12797 simplified form of the call as a tree.
12798
12799 The simplified form may be a constant or other expression which
12800 computes the same value, but in a more efficient manner (including
12801 calls to other builtin functions).
12802
12803 The call may contain arguments which need to be evaluated, but
12804 which are not useful to determine the result of the call. In
12805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12806 COMPOUND_EXPR will be an argument which must be evaluated.
12807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12808 COMPOUND_EXPR in the chain will contain the tree for the simplified
12809 form of the builtin function call. */
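
/* Illustrative folding: strspn (s1, "") and strspn ("", s2) both fold
   to (size_t) 0, while still evaluating both arguments for their side
   effects.  */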
12810
12811 static tree
12812 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12813 {
12814 if (!validate_arg (s1, POINTER_TYPE)
12815 || !validate_arg (s2, POINTER_TYPE))
12816 return NULL_TREE;
12817
12818 if (!check_nul_terminated_array (expr, s1)
12819 || !check_nul_terminated_array (expr, s2))
12820 return NULL_TREE;
12821
12822 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12823
12824 /* If either argument is "", return NULL_TREE. */
12825 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12826 /* Evaluate and ignore both arguments in case either one has
12827 side-effects. */
12828 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12829 s1, s2);
12830 return NULL_TREE;
12831 }
12832
12833 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12834 to the call.
12835
12836 Return NULL_TREE if no simplification was possible, otherwise return the
12837 simplified form of the call as a tree.
12838
12839 The simplified form may be a constant or other expression which
12840 computes the same value, but in a more efficient manner (including
12841 calls to other builtin functions).
12842
12843 The call may contain arguments which need to be evaluated, but
12844 which are not useful to determine the result of the call. In
12845 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12846 COMPOUND_EXPR will be an argument which must be evaluated.
12847 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12848 COMPOUND_EXPR in the chain will contain the tree for the simplified
12849 form of the builtin function call. */
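
/* Two illustrative foldings: strcspn ("", s2) folds to (size_t) 0
   while still evaluating S2 for side effects, and strcspn (s1, "")
   folds to strlen (s1).  */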
12850
12851 static tree
12852 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12853 {
12854 if (!validate_arg (s1, POINTER_TYPE)
12855 || !validate_arg (s2, POINTER_TYPE))
12856 return NULL_TREE;
12857
12858 if (!check_nul_terminated_array (expr, s1)
12859 || !check_nul_terminated_array (expr, s2))
12860 return NULL_TREE;
12861
12862 /* If the first argument is "", return NULL_TREE. */
12863 const char *p1 = c_getstr (s1);
12864 if (p1 && *p1 == '\0')
12865 {
12866 /* Evaluate and ignore argument s2 in case it has
12867 side-effects. */
12868 return omit_one_operand_loc (loc, size_type_node,
12869 size_zero_node, s2);
12870 }
12871
12872 /* If the second argument is "", return __builtin_strlen(s1). */
12873 const char *p2 = c_getstr (s2);
12874 if (p2 && *p2 == '\0')
12875 {
12876 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12877
12878 /* If the replacement _DECL isn't initialized, don't do the
12879 transformation. */
12880 if (!fn)
12881 return NULL_TREE;
12882
12883 return build_call_expr_loc (loc, fn, 1, s1);
12884 }
12885 return NULL_TREE;
12886 }
12887
12888 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12889 produced. False otherwise. This is done so that we don't output the error
12890 or warning twice or three times. */
12891
12892 bool
12893 fold_builtin_next_arg (tree exp, bool va_start_p)
12894 {
12895 tree fntype = TREE_TYPE (current_function_decl);
12896 int nargs = call_expr_nargs (exp);
12897 tree arg;
12898 /* There is a good chance the current input_location points inside the
12899 definition of the va_start macro (perhaps on the token for
12900 builtin) in a system header, so warnings will not be emitted.
12901 Use the location in real source code. */
12902 location_t current_location =
12903 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12904 NULL);
12905
12906 if (!stdarg_p (fntype))
12907 {
12908 error ("%<va_start%> used in function with fixed arguments");
12909 return true;
12910 }
12911
12912 if (va_start_p)
12913 {
12914 if (va_start_p && (nargs != 2))
12915 {
12916 error ("wrong number of arguments to function %<va_start%>");
12917 return true;
12918 }
12919 arg = CALL_EXPR_ARG (exp, 1);
12920 }
12921 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
12922 when we checked the arguments and if needed issued a warning. */
12923 else
12924 {
12925 if (nargs == 0)
12926 {
12927 /* Evidently an out of date version of <stdarg.h>; can't validate
12928 va_start's second argument, but can still work as intended. */
12929 warning_at (current_location,
12930 OPT_Wvarargs,
12931 "%<__builtin_next_arg%> called without an argument");
12932 return true;
12933 }
12934 else if (nargs > 1)
12935 {
12936 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12937 return true;
12938 }
12939 arg = CALL_EXPR_ARG (exp, 0);
12940 }
12941
12942 if (TREE_CODE (arg) == SSA_NAME
12943 && SSA_NAME_VAR (arg))
12944 arg = SSA_NAME_VAR (arg);
12945
12946 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12947 or __builtin_next_arg (0) the first time we see it, after checking
12948 the arguments and if needed issuing a warning. */
12949 if (!integer_zerop (arg))
12950 {
12951 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12952
12953 /* Strip off all nops for the sake of the comparison. This
12954 is not quite the same as STRIP_NOPS. It does more.
12955 We must also strip off INDIRECT_EXPR for C++ reference
12956 parameters. */
12957 while (CONVERT_EXPR_P (arg)
12958 || TREE_CODE (arg) == INDIRECT_REF)
12959 arg = TREE_OPERAND (arg, 0);
12960 if (arg != last_parm)
12961 {
12962 /* FIXME: Sometimes with the tree optimizers we can end up with
12963 something other than the last argument even though the user
12964 used the last argument. We just warn and set the arg to be the
12965 last argument so that we will get wrong-code because of
12966 it. */
12967 warning_at (current_location,
12968 OPT_Wvarargs,
12969 "second parameter of %<va_start%> not last named argument");
12970 }
12971
12972 /* Undefined by C99 7.15.1.4p4 (va_start):
12973 "If the parameter parmN is declared with the register storage
12974 class, with a function or array type, or with a type that is
12975 not compatible with the type that results after application of
12976 the default argument promotions, the behavior is undefined."
12977 */
12978 else if (DECL_REGISTER (arg))
12979 {
12980 warning_at (current_location,
12981 OPT_Wvarargs,
12982 "undefined behavior when second parameter of "
12983 "%<va_start%> is declared with %<register%> storage");
12984 }
12985
12986 /* We want to verify the second parameter just once before the tree
12987 optimizers are run and then avoid keeping it in the tree,
12988 as otherwise we could warn even for correct code like:
12989 void foo (int i, ...)
12990 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12991 if (va_start_p)
12992 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12993 else
12994 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12995 }
12996 return false;
12997 }
12998
12999
13000 /* Expand a call EXP to __builtin_object_size. */
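
/* If a __builtin_object_size call survives to expansion, earlier
   passes were unable to compute the size, so only the "unknown"
   answers remain: e.g. (illustrative)
     __builtin_object_size (p, 0)  expands to (size_t) -1, and
     __builtin_object_size (p, 2)  expands to (size_t) 0
   since maximum modes 0 and 1 yield -1 and minimum modes 2 and 3
   yield 0 when nothing is known.  */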
13001
13002 static rtx
13003 expand_builtin_object_size (tree exp)
13004 {
13005 tree ost;
13006 int object_size_type;
13007 tree fndecl = get_callee_fndecl (exp);
13008
13009 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
13010 {
13011 error ("first argument of %qD must be a pointer, second integer constant",
13012 fndecl);
13013 expand_builtin_trap ();
13014 return const0_rtx;
13015 }
13016
13017 ost = CALL_EXPR_ARG (exp, 1);
13018 STRIP_NOPS (ost);
13019
13020 if (TREE_CODE (ost) != INTEGER_CST
13021 || tree_int_cst_sgn (ost) < 0
13022 || compare_tree_int (ost, 3) > 0)
13023 {
13024 error ("last argument of %qD is not integer constant between 0 and 3",
13025 fndecl);
13026 expand_builtin_trap ();
13027 return const0_rtx;
13028 }
13029
13030 object_size_type = tree_to_shwi (ost);
13031
13032 return object_size_type < 2 ? constm1_rtx : const0_rtx;
13033 }
13034
13035 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
13036 FCODE is the BUILT_IN_* to use.
13037 Return NULL_RTX if we failed; the caller should emit a normal call,
13038 otherwise try to get the result in TARGET, if convenient (and in
13039 mode MODE if that's convenient). */
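
/* An illustrative case: when the length is a known constant that the
   checks below accept, e.g.
     __builtin___memcpy_chk (d, s, 32, 128)
   the call is expanded as a plain
     memcpy (d, s, 32)
   and likewise when the object size argument is (size_t) -1, i.e.
   unknown; otherwise the checking variant is emitted as a library
   call.  */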
13040
13041 static rtx
13042 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
13043 enum built_in_function fcode)
13044 {
13045 if (!validate_arglist (exp,
13046 POINTER_TYPE,
13047 fcode == BUILT_IN_MEMSET_CHK
13048 ? INTEGER_TYPE : POINTER_TYPE,
13049 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
13050 return NULL_RTX;
13051
13052 tree dest = CALL_EXPR_ARG (exp, 0);
13053 tree src = CALL_EXPR_ARG (exp, 1);
13054 tree len = CALL_EXPR_ARG (exp, 2);
13055 tree size = CALL_EXPR_ARG (exp, 3);
13056
13057 /* FIXME: Set access mode to write only for memset et al. */
13058 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
13059 /*srcstr=*/NULL_TREE, size, access_read_write);
13060
13061 if (!tree_fits_uhwi_p (size))
13062 return NULL_RTX;
13063
13064 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
13065 {
13066 /* Avoid transforming the checking call to an ordinary one when
13067 an overflow has been detected or when the call couldn't be
13068 validated because the size is not constant. */
13069 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
13070 return NULL_RTX;
13071
13072 tree fn = NULL_TREE;
13073 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
13074 mem{cpy,pcpy,move,set} is available. */
13075 switch (fcode)
13076 {
13077 case BUILT_IN_MEMCPY_CHK:
13078 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
13079 break;
13080 case BUILT_IN_MEMPCPY_CHK:
13081 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
13082 break;
13083 case BUILT_IN_MEMMOVE_CHK:
13084 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
13085 break;
13086 case BUILT_IN_MEMSET_CHK:
13087 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
13088 break;
13089 default:
13090 break;
13091 }
13092
13093 if (! fn)
13094 return NULL_RTX;
13095
13096 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
13097 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
13098 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
13099 return expand_expr (fn, target, mode, EXPAND_NORMAL);
13100 }
13101 else if (fcode == BUILT_IN_MEMSET_CHK)
13102 return NULL_RTX;
13103 else
13104 {
13105 unsigned int dest_align = get_pointer_alignment (dest);
13106
13107 /* If DEST is not a pointer type, call the normal function. */
13108 if (dest_align == 0)
13109 return NULL_RTX;
13110
13111 /* If SRC and DEST are the same (and not volatile), do nothing. */
13112 if (operand_equal_p (src, dest, 0))
13113 {
13114 tree expr;
13115
13116 if (fcode != BUILT_IN_MEMPCPY_CHK)
13117 {
13118 /* Evaluate and ignore LEN in case it has side-effects. */
13119 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
13120 return expand_expr (dest, target, mode, EXPAND_NORMAL);
13121 }
13122
13123 expr = fold_build_pointer_plus (dest, len);
13124 return expand_expr (expr, target, mode, EXPAND_NORMAL);
13125 }
13126
13127 /* __memmove_chk special case. */
13128 if (fcode == BUILT_IN_MEMMOVE_CHK)
13129 {
13130 unsigned int src_align = get_pointer_alignment (src);
13131
13132 if (src_align == 0)
13133 return NULL_RTX;
13134
13135 /* If src is categorized for a readonly section we can use
13136 normal __memcpy_chk. */
13137 if (readonly_data_expr (src))
13138 {
13139 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
13140 if (!fn)
13141 return NULL_RTX;
13142 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
13143 dest, src, len, size);
13144 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
13145 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
13146 return expand_expr (fn, target, mode, EXPAND_NORMAL);
13147 }
13148 }
13149 return NULL_RTX;
13150 }
13151 }
13152
13153 /* Emit warning if a buffer overflow is detected at compile time. */
13154
13155 static void
13156 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
13157 {
13158 /* The source string. */
13159 tree srcstr = NULL_TREE;
13160 /* The size of the destination object returned by __builtin_object_size. */
13161 tree objsize = NULL_TREE;
13162 /* The string that is being concatenated with (as in __strcat_chk)
13163 or null if it isn't. */
13164 tree catstr = NULL_TREE;
13165 /* The maximum length of the source sequence in a bounded operation
13166 (such as __strncat_chk) or null if the operation isn't bounded
13167 (such as __strcat_chk). */
13168 tree maxread = NULL_TREE;
13169 /* The exact size of the access (such as in __strncpy_chk). */
13170 tree size = NULL_TREE;
13171 /* The access by the function that's checked. Except for snprintf
13172 both writing and reading are checked. */
13173 access_mode mode = access_read_write;
13174
13175 switch (fcode)
13176 {
13177 case BUILT_IN_STRCPY_CHK:
13178 case BUILT_IN_STPCPY_CHK:
13179 srcstr = CALL_EXPR_ARG (exp, 1);
13180 objsize = CALL_EXPR_ARG (exp, 2);
13181 break;
13182
13183 case BUILT_IN_STRCAT_CHK:
13184 /* For __strcat_chk the warning will be emitted only if overflowing
13185 by at least strlen (dest) + 1 bytes. */
13186 catstr = CALL_EXPR_ARG (exp, 0);
13187 srcstr = CALL_EXPR_ARG (exp, 1);
13188 objsize = CALL_EXPR_ARG (exp, 2);
13189 break;
13190
13191 case BUILT_IN_STRNCAT_CHK:
13192 catstr = CALL_EXPR_ARG (exp, 0);
13193 srcstr = CALL_EXPR_ARG (exp, 1);
13194 maxread = CALL_EXPR_ARG (exp, 2);
13195 objsize = CALL_EXPR_ARG (exp, 3);
13196 break;
13197
13198 case BUILT_IN_STRNCPY_CHK:
13199 case BUILT_IN_STPNCPY_CHK:
13200 srcstr = CALL_EXPR_ARG (exp, 1);
13201 size = CALL_EXPR_ARG (exp, 2);
13202 objsize = CALL_EXPR_ARG (exp, 3);
13203 break;
13204
13205 case BUILT_IN_SNPRINTF_CHK:
13206 case BUILT_IN_VSNPRINTF_CHK:
13207 maxread = CALL_EXPR_ARG (exp, 1);
13208 objsize = CALL_EXPR_ARG (exp, 3);
13209 /* The only checked access is the write to the destination. */
13210 mode = access_write_only;
13211 break;
13212 default:
13213 gcc_unreachable ();
13214 }
13215
13216 if (catstr && maxread)
13217 {
13218 /* Check __strncat_chk. There is no way to determine the length
13219 of the string to which the source string is being appended so
13220 just warn when the length of the source string is not known. */
13221 check_strncat_sizes (exp, objsize);
13222 return;
13223 }
13224
13225 check_access (exp, size, maxread, srcstr, objsize, mode);
13226 }
13227
13228 /* Emit warning if a buffer overflow is detected at compile time
13229 in __sprintf_chk/__vsprintf_chk calls. */
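
/* A concrete example of the check below (illustrative): for
     __builtin___sprintf_chk (buf, 1, 4, "abcde")
   the format contains no '%', so the output needs
   strlen ("abcde") + 1 == 6 bytes, which exceeds the object size 4
   and triggers the overflow warning.  */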
13230
13231 static void
13232 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
13233 {
13234 tree size, len, fmt;
13235 const char *fmt_str;
13236 int nargs = call_expr_nargs (exp);
13237
13238 /* Verify the required arguments in the original call. */
13239
13240 if (nargs < 4)
13241 return;
13242 size = CALL_EXPR_ARG (exp, 2);
13243 fmt = CALL_EXPR_ARG (exp, 3);
13244
13245 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
13246 return;
13247
13248 /* Check whether the format is a literal string constant. */
13249 fmt_str = c_getstr (fmt);
13250 if (fmt_str == NULL)
13251 return;
13252
13253 if (!init_target_chars ())
13254 return;
13255
13256 /* If the format doesn't contain % args or %%, we know its size. */
13257 if (strchr (fmt_str, target_percent) == 0)
13258 len = build_int_cstu (size_type_node, strlen (fmt_str));
13259 /* If the format is "%s" and the first ... argument is a string literal,
13260 we know it too. */
13261 else if (fcode == BUILT_IN_SPRINTF_CHK
13262 && strcmp (fmt_str, target_percent_s) == 0)
13263 {
13264 tree arg;
13265
13266 if (nargs < 5)
13267 return;
13268 arg = CALL_EXPR_ARG (exp, 4);
13269 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
13270 return;
13271
13272 len = c_strlen (arg, 1);
13273 if (!len || ! tree_fits_uhwi_p (len))
13274 return;
13275 }
13276 else
13277 return;
13278
13279 /* Add one for the terminating nul. */
13280 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
13281
13282 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
13283 access_write_only);
13284 }
13285
13286 /* Return true if FNDECL is an allocation function. Unless
13287 ALL_ALLOC is set, consider only functions that return dynamically
13288 allocated objects. Otherwise return true even for all forms of
13289 alloca (including VLA). */
13290
13291 static bool
13292 fndecl_alloc_p (tree fndecl, bool all_alloc)
13293 {
13294 if (!fndecl)
13295 return false;
13296
13297 /* A call to operator new isn't recognized as one to a built-in. */
13298 if (DECL_IS_OPERATOR_NEW_P (fndecl))
13299 return true;
13300
13301 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13302 {
13303 switch (DECL_FUNCTION_CODE (fndecl))
13304 {
13305 case BUILT_IN_ALLOCA:
13306 case BUILT_IN_ALLOCA_WITH_ALIGN:
13307 return all_alloc;
13308 case BUILT_IN_ALIGNED_ALLOC:
13309 case BUILT_IN_CALLOC:
13310 case BUILT_IN_GOMP_ALLOC:
13311 case BUILT_IN_MALLOC:
13312 case BUILT_IN_REALLOC:
13313 case BUILT_IN_STRDUP:
13314 case BUILT_IN_STRNDUP:
13315 return true;
13316 default:
13317 break;
13318 }
13319 }
13320
13321 /* A function is considered an allocation function if it's declared
13322 with attribute malloc with an argument naming its associated
13323 deallocation function. */
13324 tree attrs = DECL_ATTRIBUTES (fndecl);
13325 if (!attrs)
13326 return false;
13327
13328 for (tree allocs = attrs;
13329 (allocs = lookup_attribute ("malloc", allocs));
13330 allocs = TREE_CHAIN (allocs))
13331 {
13332 tree args = TREE_VALUE (allocs);
13333 if (!args)
13334 continue;
13335
13336 if (TREE_VALUE (args))
13337 return true;
13338 }
13339
13340 return false;
13341 }
13342
13343 /* Return true if STMT is a call to an allocation function. A wrapper
13344 around fndecl_alloc_p. */
13345
13346 static bool
13347 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13348 {
13349 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13350 }
13351
13352 /* Return the zero-based number corresponding to the argument being
13353 deallocated if EXP is a call to a deallocation function or UINT_MAX
13354 if it isn't. */
13355
13356 static unsigned
13357 call_dealloc_argno (tree exp)
13358 {
13359 tree fndecl = get_callee_fndecl (exp);
13360 if (!fndecl)
13361 return UINT_MAX;
13362
13363 return fndecl_dealloc_argno (fndecl);
13364 }
13365
13366 /* Return the zero-based number corresponding to the argument being
13367 deallocated if FNDECL is a deallocation function or UINT_MAX
13368 if it isn't. */
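
/* As an illustration of where these attributes come from, a
   hypothetical user-defined pair declared as

     void my_free (void *);
     void *my_alloc (int) __attribute__ ((malloc (my_free, 1)));

   causes the front end to attach a "malloc" attribute naming my_free
   to my_alloc and an internal "*dealloc" attribute to my_free
   recording my_alloc and the 1-based position of the pointer
   argument, so this function would return 0 for my_free.  */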
13369
13370 unsigned
13371 fndecl_dealloc_argno (tree fndecl)
13372 {
13373 /* A call to operator delete isn't recognized as one to a built-in. */
13374 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13375 {
13376 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
13377 return 0;
13378
13379 /* Avoid placement delete that's not been inlined. */
13380 tree fname = DECL_ASSEMBLER_NAME (fndecl);
13381 if (id_equal (fname, "_ZdlPvS_") // ordinary form
13382 || id_equal (fname, "_ZdaPvS_")) // array form
13383 return UINT_MAX;
13384 return 0;
13385 }
13386
13387 /* TODO: Handle user-defined functions with attribute malloc? Handle
13388 known non-built-ins like fopen? */
13389 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13390 {
13391 switch (DECL_FUNCTION_CODE (fndecl))
13392 {
13393 case BUILT_IN_FREE:
13394 case BUILT_IN_REALLOC:
13395 return 0;
13396 default:
13397 break;
13398 }
13399 return UINT_MAX;
13400 }
13401
13402 tree attrs = DECL_ATTRIBUTES (fndecl);
13403 if (!attrs)
13404 return UINT_MAX;
13405
13406 for (tree atfree = attrs;
13407 (atfree = lookup_attribute ("*dealloc", atfree));
13408 atfree = TREE_CHAIN (atfree))
13409 {
13410 tree alloc = TREE_VALUE (atfree);
13411 if (!alloc)
13412 continue;
13413
13414 tree pos = TREE_CHAIN (alloc);
13415 if (!pos)
13416 return 0;
13417
13418 pos = TREE_VALUE (pos);
13419 return TREE_INT_CST_LOW (pos) - 1;
13420 }
13421
13422 return UINT_MAX;
13423 }
13424
13425 /* Return true if DELC doesn't refer to an operator delete that's
13426 suitable to call with a pointer returned from the operator new
13427 described by NEWC. */
13428
13429 static bool
13430 new_delete_mismatch_p (const demangle_component &newc,
13431 const demangle_component &delc)
13432 {
13433 if (newc.type != delc.type)
13434 return true;
13435
13436 switch (newc.type)
13437 {
13438 case DEMANGLE_COMPONENT_NAME:
13439 {
13440 int len = newc.u.s_name.len;
13441 const char *news = newc.u.s_name.s;
13442 const char *dels = delc.u.s_name.s;
13443 if (len != delc.u.s_name.len || memcmp (news, dels, len))
13444 return true;
13445
13446 if (news[len] == 'n')
13447 {
13448 if (news[len + 1] == 'a')
13449 return dels[len] != 'd' || dels[len + 1] != 'a';
13450 if (news[len + 1] == 'w')
13451 return dels[len] != 'd' || dels[len + 1] != 'l';
13452 }
13453 return false;
13454 }
13455
13456 case DEMANGLE_COMPONENT_OPERATOR:
13457 /* Operator mismatches are handled above. */
13458 return false;
13459
13460 case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13461 if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13462 return true;
13463 return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13464 *delc.u.s_extended_operator.name);
13465
13466 case DEMANGLE_COMPONENT_FIXED_TYPE:
13467 if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13468 || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13469 return true;
13470 return new_delete_mismatch_p (*newc.u.s_fixed.length,
13471 *delc.u.s_fixed.length);
13472
13473 case DEMANGLE_COMPONENT_CTOR:
13474 if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13475 return true;
13476 return new_delete_mismatch_p (*newc.u.s_ctor.name,
13477 *delc.u.s_ctor.name);
13478
13479 case DEMANGLE_COMPONENT_DTOR:
13480 if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13481 return true;
13482 return new_delete_mismatch_p (*newc.u.s_dtor.name,
13483 *delc.u.s_dtor.name);
13484
13485 case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13486 {
13487 /* The demangler API provides no better way to compare built-in
13488 types than by comparing their demangled names. */
13489 size_t nsz, dsz;
13490 demangle_component *pnc = const_cast<demangle_component *>(&newc);
13491 demangle_component *pdc = const_cast<demangle_component *>(&delc);
13492 char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13493 char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13494 if (!nts != !dts)
13495 return true;
13496 bool mismatch = strcmp (nts, dts);
13497 free (nts);
13498 free (dts);
13499 return mismatch;
13500 }
13501
13502 case DEMANGLE_COMPONENT_SUB_STD:
13503 if (newc.u.s_string.len != delc.u.s_string.len)
13504 return true;
13505 return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13506 newc.u.s_string.len);
13507
13508 case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13509 case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13510 return newc.u.s_number.number != delc.u.s_number.number;
13511
13512 case DEMANGLE_COMPONENT_CHARACTER:
13513 return newc.u.s_character.character != delc.u.s_character.character;
13514
13515 case DEMANGLE_COMPONENT_DEFAULT_ARG:
13516 case DEMANGLE_COMPONENT_LAMBDA:
13517 if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13518 return true;
13519 return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13520 *delc.u.s_unary_num.sub);
13521 default:
13522 break;
13523 }
13524
13525 if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13526 return true;
13527
13528 if (!newc.u.s_binary.left)
13529 return false;
13530
13531 if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13532 || !newc.u.s_binary.right != !delc.u.s_binary.right)
13533 return true;
13534
13535 if (newc.u.s_binary.right)
13536 return new_delete_mismatch_p (*newc.u.s_binary.right,
13537 *delc.u.s_binary.right);
13538 return false;
13539 }
13540
13541 /* Return true if DELETE_DECL is an operator delete that's not suitable
13542 to call with a pointer returned from NEW_DECL. */
13543
13544 static bool
13545 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13546 {
13547 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13548 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13549
13550 /* valid_new_delete_pair_p() returns a conservative result (currently
13551 it only handles global operators). A true result is reliable but
13552 a false result doesn't necessarily mean the operators don't match. */
13553 if (valid_new_delete_pair_p (new_name, delete_name))
13554 return false;
13555
13556 /* For anything not handled by valid_new_delete_pair_p() such as member
13557 operators compare the individual demangled components of the mangled
13558 name. */
13559 const char *new_str = IDENTIFIER_POINTER (new_name);
13560 const char *del_str = IDENTIFIER_POINTER (delete_name);
13561
13562 void *np = NULL, *dp = NULL;
13563 demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13564 demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13565 bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13566 free (np);
13567 free (dp);
13568 return mismatch;
13569 }
13570
13571 /* ALLOC_DECL and DEALLOC_DECL are a pair of allocation and deallocation
13572 functions. Return true if the latter is suitable to deallocate objects
13573 allocated by calls to the former. */
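
/* For instance (illustrative): free () is a suitable deallocator for
   pointers returned by malloc (), calloc () or strdup (); operator
   delete matches the corresponding operator new; but mixing the two
   families, e.g. passing a pointer obtained from operator new to
   free (), makes this predicate return false.  */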
13574
13575 static bool
13576 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13577 {
13578 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13579 a built-in deallocator. */
13580 enum class alloc_kind_t { none, builtin, user }
13581 alloc_dealloc_kind = alloc_kind_t::none;
13582
13583 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13584 {
13585 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13586 /* Return true iff both functions are of the same array or
13587 singleton form and false otherwise. */
13588 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13589
13590 /* Return false for deallocation functions that are known not
13591 to match. */
13592 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13593 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13594 return false;
13595 /* Otherwise proceed below to check the deallocation function's
13596 "*dealloc" attributes to look for one that mentions this operator
13597 new. */
13598 }
13599 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13600 {
13601 switch (DECL_FUNCTION_CODE (alloc_decl))
13602 {
13603 case BUILT_IN_ALLOCA:
13604 case BUILT_IN_ALLOCA_WITH_ALIGN:
13605 return false;
13606
13607 case BUILT_IN_ALIGNED_ALLOC:
13608 case BUILT_IN_CALLOC:
13609 case BUILT_IN_GOMP_ALLOC:
13610 case BUILT_IN_MALLOC:
13611 case BUILT_IN_REALLOC:
13612 case BUILT_IN_STRDUP:
13613 case BUILT_IN_STRNDUP:
13614 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13615 return false;
13616
13617 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13618 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13619 return true;
13620
13621 alloc_dealloc_kind = alloc_kind_t::builtin;
13622 break;
13623
13624 default:
13625 break;
13626 }
13627 }
13628
13629 /* Set if DEALLOC_DECL both allocates and deallocates. */
13630 alloc_kind_t realloc_kind = alloc_kind_t::none;
13631
13632 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13633 {
13634 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13635 if (dealloc_code == BUILT_IN_REALLOC)
13636 realloc_kind = alloc_kind_t::builtin;
13637
13638 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13639 (amats = lookup_attribute ("malloc", amats));
13640 amats = TREE_CHAIN (amats))
13641 {
13642 tree args = TREE_VALUE (amats);
13643 if (!args)
13644 continue;
13645
13646 tree fndecl = TREE_VALUE (args);
13647 if (!fndecl || !DECL_P (fndecl))
13648 continue;
13649
13650 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13651 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13652 return true;
13653 }
13654 }
13655
13656 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13657 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13658
13659 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13660 of its associated allocation functions for ALLOC_DECL.
13661 If the corresponding ALLOC_DECL is found they're a matching pair,
13662 otherwise they're not.
13663 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13664 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13665 (ddats = lookup_attribute ("*dealloc", ddats));
13666 ddats = TREE_CHAIN (ddats))
13667 {
13668 tree args = TREE_VALUE (ddats);
13669 if (!args)
13670 continue;
13671
13672 tree alloc = TREE_VALUE (args);
13673 if (!alloc)
13674 continue;
13675
13676 if (alloc == DECL_NAME (dealloc_decl))
13677 realloc_kind = alloc_kind_t::user;
13678
13679 if (DECL_P (alloc))
13680 {
13681 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13682
13683 switch (DECL_FUNCTION_CODE (alloc))
13684 {
13685 case BUILT_IN_ALIGNED_ALLOC:
13686 case BUILT_IN_CALLOC:
13687 case BUILT_IN_GOMP_ALLOC:
13688 case BUILT_IN_MALLOC:
13689 case BUILT_IN_REALLOC:
13690 case BUILT_IN_STRDUP:
13691 case BUILT_IN_STRNDUP:
13692 realloc_dealloc_kind = alloc_kind_t::builtin;
13693 break;
13694 default:
13695 break;
13696 }
13697
13698 if (!alloc_builtin)
13699 continue;
13700
13701 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13702 continue;
13703
13704 return true;
13705 }
13706
13707 if (alloc == DECL_NAME (alloc_decl))
13708 return true;
13709 }
13710
13711 if (realloc_kind == alloc_kind_t::none)
13712 return false;
13713
13714 hash_set<tree> common_deallocs;
13715 /* Special handling for deallocators. Iterate over both the allocator's
13716 and the reallocator's associated deallocator functions looking for
13717 the first one in common. If one is found, the de/reallocator is
13718 a match for the allocator even though the latter isn't directly
13719 associated with the former. This simplifies declarations in system
13720 headers.
13721 With AMATS set to the Allocator's Malloc ATtributes,
13722 and RMATS set to Reallocator's Malloc ATtributes... */
13723 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13724 rmats = DECL_ATTRIBUTES (dealloc_decl);
13725 (amats = lookup_attribute ("malloc", amats))
13726 || (rmats = lookup_attribute ("malloc", rmats));
13727 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13728 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13729 {
13730 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13731 if (tree adealloc = TREE_VALUE (args))
13732 {
13733 if (DECL_P (adealloc)
13734 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13735 {
13736 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13737 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13738 {
13739 if (realloc_kind == alloc_kind_t::builtin)
13740 return true;
13741 alloc_dealloc_kind = alloc_kind_t::builtin;
13742 }
13743 continue;
13744 }
13745
13746 common_deallocs.add (adealloc);
13747 }
13748
13749 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13750 if (tree ddealloc = TREE_VALUE (args))
13751 {
13752 if (DECL_P (ddealloc)
13753 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13754 {
13755 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13756 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13757 {
13758 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13759 return true;
13760 realloc_dealloc_kind = alloc_kind_t::builtin;
13761 }
13762 continue;
13763 }
13764
13765 if (common_deallocs.add (ddealloc))
13766 return true;
13767 }
13768 }
13769
13770 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13771 a built-in deallocator. */
13772 return (alloc_dealloc_kind == alloc_kind_t::builtin
13773 && realloc_dealloc_kind == alloc_kind_t::builtin);
13774 }
13775
13776 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13777 objects allocated by the ALLOC call. */
13778
13779 static bool
13780 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13781 {
13782 tree alloc_decl = gimple_call_fndecl (alloc);
13783 if (!alloc_decl)
13784 return true;
13785
13786 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13787 }
13788
13789 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13790 includes a nonzero offset. Such a pointer cannot refer to the beginning
13791 of an allocated object. A negative offset may refer to it only if
13792 the target pointer is unknown. */
13793
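/* A minimal illustration (hypothetical user code) of the pattern
   diagnosed here:

     char *p = (char *) malloc (8);
     free (p + 1);   // -Wfree-nonheap-object: pointer with nonzero offset

   When the offset is known it is included in the warning, formatted
   as done below.  */
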
13794 static bool
13795 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13796 {
13797 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13798 return false;
13799
13800 tree dealloc_decl = get_callee_fndecl (exp);
13801 if (!dealloc_decl)
13802 return false;
13803
13804 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13805 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13806 {
13807 /* A call to a user-defined operator delete with a pointer plus offset
13808 may be valid if it's returned from an unknown function (i.e., one
13809 that's not operator new). */
13810 if (TREE_CODE (aref.ref) == SSA_NAME)
13811 {
13812 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13813 if (is_gimple_call (def_stmt))
13814 {
13815 tree alloc_decl = gimple_call_fndecl (def_stmt);
13816 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13817 return false;
13818 }
13819 }
13820 }
13821
13822 char offstr[80];
13823 offstr[0] = '\0';
13824 if (wi::fits_shwi_p (aref.offrng[0]))
13825 {
13826 if (aref.offrng[0] == aref.offrng[1]
13827 || !wi::fits_shwi_p (aref.offrng[1]))
13828 sprintf (offstr, " %lli",
13829 (long long)aref.offrng[0].to_shwi ());
13830 else
13831 sprintf (offstr, " [%lli, %lli]",
13832 (long long)aref.offrng[0].to_shwi (),
13833 (long long)aref.offrng[1].to_shwi ());
13834 }
13835
13836 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13837 "%qD called on pointer %qE with nonzero offset%s",
13838 dealloc_decl, aref.ref, offstr))
13839 return false;
13840
13841 if (DECL_P (aref.ref))
13842 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13843 else if (TREE_CODE (aref.ref) == SSA_NAME)
13844 {
13845 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13846 if (is_gimple_call (def_stmt))
13847 {
13848 location_t def_loc = gimple_location (def_stmt);
13849 tree alloc_decl = gimple_call_fndecl (def_stmt);
13850 if (alloc_decl)
13851 inform (def_loc,
13852 "returned from %qD", alloc_decl);
13853 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13854 inform (def_loc,
13855 "returned from %qT", alloc_fntype);
13856 else
13857 inform (def_loc, "obtained here");
13858 }
13859 }
13860
13861 return true;
13862 }
13863
13864 /* Issue a warning if a deallocation function such as free, realloc,
13865 or C++ operator delete is called with an argument not returned by
13866 a matching allocation function such as malloc or the corresponding
13867 form of C++ operator new. */
13868
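/* Illustrative examples (hypothetical user code) of calls diagnosed
   by this function:

     char buf[8];
     free (buf);        // -Wfree-nonheap-object: unallocated object

     int *p = new int;
     free (p);          // -Wmismatched-new-delete: wrong deallocator

   The exact warning text for each case appears in the code below.  */
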
13869 void
13870 maybe_emit_free_warning (tree exp)
13871 {
13872 tree fndecl = get_callee_fndecl (exp);
13873 if (!fndecl)
13874 return;
13875
13876 unsigned argno = call_dealloc_argno (exp);
13877 if ((unsigned) call_expr_nargs (exp) <= argno)
13878 return;
13879
13880 tree ptr = CALL_EXPR_ARG (exp, argno);
13881 if (integer_zerop (ptr))
13882 return;
13883
13884 access_ref aref;
13885 if (!compute_objsize (ptr, 0, &aref))
13886 return;
13887
13888 tree ref = aref.ref;
13889 if (integer_zerop (ref))
13890 return;
13891
13892 tree dealloc_decl = get_callee_fndecl (exp);
13893 location_t loc = EXPR_LOCATION (exp);
13894
13895 if (DECL_P (ref) || EXPR_P (ref))
13896 {
13897 /* Diagnose freeing a declared object. */
13898 if (aref.ref_declared ()
13899 && warning_at (loc, OPT_Wfree_nonheap_object,
13900 "%qD called on unallocated object %qD",
13901 dealloc_decl, ref))
13902 {
13903 loc = (DECL_P (ref)
13904 ? DECL_SOURCE_LOCATION (ref)
13905 : EXPR_LOCATION (ref));
13906 inform (loc, "declared here");
13907 return;
13908 }
13909
13910 /* Diagnose freeing a pointer that includes a positive offset.
13911 Such a pointer cannot refer to the beginning of an allocated
13912 object. A negative offset may refer to it. */
13913 if (aref.sizrng[0] != aref.sizrng[1]
13914 && warn_dealloc_offset (loc, exp, aref))
13915 return;
13916 }
13917 else if (CONSTANT_CLASS_P (ref))
13918 {
13919 if (warning_at (loc, OPT_Wfree_nonheap_object,
13920 "%qD called on a pointer to an unallocated "
13921 "object %qE", dealloc_decl, ref))
13922 {
13923 if (TREE_CODE (ptr) == SSA_NAME)
13924 {
13925 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13926 if (is_gimple_assign (def_stmt))
13927 {
13928 location_t loc = gimple_location (def_stmt);
13929 inform (loc, "assigned here");
13930 }
13931 }
13932 return;
13933 }
13934 }
13935 else if (TREE_CODE (ref) == SSA_NAME)
13936 {
13937 /* Also warn if the pointer argument refers to the result
13938 of an allocation call like alloca or VLA. */
13939 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13940 if (is_gimple_call (def_stmt))
13941 {
13942 bool warned = false;
13943 if (gimple_call_alloc_p (def_stmt))
13944 {
13945 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13946 {
13947 if (warn_dealloc_offset (loc, exp, aref))
13948 return;
13949 }
13950 else
13951 {
13952 tree alloc_decl = gimple_call_fndecl (def_stmt);
13953 const opt_code opt =
13954 (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13955 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13956 ? OPT_Wmismatched_new_delete
13957 : OPT_Wmismatched_dealloc);
13958 warned = warning_at (loc, opt,
13959 "%qD called on pointer returned "
13960 "from a mismatched allocation "
13961 "function", dealloc_decl);
13962 }
13963 }
13964 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13965 || gimple_call_builtin_p (def_stmt,
13966 BUILT_IN_ALLOCA_WITH_ALIGN))
13967 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13968 "%qD called on pointer to "
13969 "an unallocated object",
13970 dealloc_decl);
13971 else if (warn_dealloc_offset (loc, exp, aref))
13972 return;
13973
13974 if (warned)
13975 {
13976 tree fndecl = gimple_call_fndecl (def_stmt);
13977 inform (gimple_location (def_stmt),
13978 "returned from %qD", fndecl);
13979 return;
13980 }
13981 }
13982 else if (gimple_nop_p (def_stmt))
13983 {
13984 ref = SSA_NAME_VAR (ref);
13985 /* Diagnose freeing a pointer that includes a positive offset. */
13986 if (TREE_CODE (ref) == PARM_DECL
13987 && !aref.deref
13988 && aref.sizrng[0] != aref.sizrng[1]
13989 && aref.offrng[0] > 0 && aref.offrng[1] > 0
13990 && warn_dealloc_offset (loc, exp, aref))
13991 return;
13992 }
13993 }
13994 }
13995
13996 /* Fold a call to __builtin_object_size with arguments PTR and OST,
13997 if possible. */
13998
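/* For illustration (hypothetical user code): given

     char a[10];

   a call __builtin_object_size (&a[3], 0) can be folded to 7 here,
   whereas a pointer whose referent cannot be determined yields
   NULL_TREE and the call is left for later passes.  */
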
13999 static tree
14000 fold_builtin_object_size (tree ptr, tree ost)
14001 {
14002 unsigned HOST_WIDE_INT bytes;
14003 int object_size_type;
14004
14005 if (!validate_arg (ptr, POINTER_TYPE)
14006 || !validate_arg (ost, INTEGER_TYPE))
14007 return NULL_TREE;
14008
14009 STRIP_NOPS (ost);
14010
14011 if (TREE_CODE (ost) != INTEGER_CST
14012 || tree_int_cst_sgn (ost) < 0
14013 || compare_tree_int (ost, 3) > 0)
14014 return NULL_TREE;
14015
14016 object_size_type = tree_to_shwi (ost);
14017
14018 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
14019 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
14020 and (size_t) 0 for types 2 and 3. */
14021 if (TREE_SIDE_EFFECTS (ptr))
14022 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
14023
14024 if (TREE_CODE (ptr) == ADDR_EXPR)
14025 {
14026 compute_builtin_object_size (ptr, object_size_type, &bytes);
14027 if (wi::fits_to_tree_p (bytes, size_type_node))
14028 return build_int_cstu (size_type_node, bytes);
14029 }
14030 else if (TREE_CODE (ptr) == SSA_NAME)
14031 {
14032 /* If object size is not known yet, delay folding until
14033 later. Maybe subsequent passes will help determine
14034 it. */
14035 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
14036 && wi::fits_to_tree_p (bytes, size_type_node))
14037 return build_int_cstu (size_type_node, bytes);
14038 }
14039
14040 return NULL_TREE;
14041 }
14042
14043 /* Builtins with folding operations that operate on "..." arguments
14044 need special handling; we need to store the arguments in a convenient
14045 data structure before attempting any folding. Fortunately there are
14046 only a few builtins that fall into this category. FNDECL is the
14047 function; ARGS points to its NARGS arguments. */
14048
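/* For example (an illustration), a call with a constant argument such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   can be folded by fold_builtin_fpclassify to the FP_NORMAL argument.  */
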
14049 static tree
14050 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
14051 {
14052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14053 tree ret = NULL_TREE;
14054
14055 switch (fcode)
14056 {
14057 case BUILT_IN_FPCLASSIFY:
14058 ret = fold_builtin_fpclassify (loc, args, nargs);
14059 break;
14060
14061 default:
14062 break;
14063 }
14064 if (ret)
14065 {
14066 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14067 SET_EXPR_LOCATION (ret, loc);
14068 suppress_warning (ret);
14069 return ret;
14070 }
14071 return NULL_TREE;
14072 }
14073
14074 /* Initialize format string characters in the target charset. */
14075
14076 bool
14077 init_target_chars (void)
14078 {
14079 static bool init;
14080 if (!init)
14081 {
14082 target_newline = lang_hooks.to_target_charset ('\n');
14083 target_percent = lang_hooks.to_target_charset ('%');
14084 target_c = lang_hooks.to_target_charset ('c');
14085 target_s = lang_hooks.to_target_charset ('s');
14086 if (target_newline == 0 || target_percent == 0 || target_c == 0
14087 || target_s == 0)
14088 return false;
14089
14090 target_percent_c[0] = target_percent;
14091 target_percent_c[1] = target_c;
14092 target_percent_c[2] = '\0';
14093
14094 target_percent_s[0] = target_percent;
14095 target_percent_s[1] = target_s;
14096 target_percent_s[2] = '\0';
14097
14098 target_percent_s_newline[0] = target_percent;
14099 target_percent_s_newline[1] = target_s;
14100 target_percent_s_newline[2] = target_newline;
14101 target_percent_s_newline[3] = '\0';
14102
14103 init = true;
14104 }
14105 return true;
14106 }
14107
14108 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
14109 and no overflow/underflow occurred. INEXACT is true if M was not
14110 exactly calculated. TYPE is the tree type for the result. This
14111 function assumes that you cleared the MPFR flags and then
14112 calculated M to see if anything subsequently set a flag prior to
14113 entering this function. Return NULL_TREE if any checks fail. */
14114
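/* The expected calling pattern is, roughly (a sketch; the real uses
   appear in the callers below):

     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);  */
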
14115 static tree
14116 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
14117 {
14118 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
14119 overflow/underflow occurred. If -frounding-math, proceed iff the
14120 result of calling FUNC was exact. */
14121 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
14122 && (!flag_rounding_math || !inexact))
14123 {
14124 REAL_VALUE_TYPE rr;
14125
14126 real_from_mpfr (&rr, m, type, MPFR_RNDN);
14127 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
14128 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
14129 but the mpfr_t is not, then we underflowed in the
14130 conversion. */
14131 if (real_isfinite (&rr)
14132 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
14133 {
14134 REAL_VALUE_TYPE rmode;
14135
14136 real_convert (&rmode, TYPE_MODE (type), &rr);
14137 /* Proceed iff the specified mode can hold the value. */
14138 if (real_identical (&rmode, &rr))
14139 return build_real (type, rmode);
14140 }
14141 }
14142 return NULL_TREE;
14143 }
14144
14145 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
14146 number and no overflow/underflow occurred. INEXACT is true if M
14147 was not exactly calculated. TYPE is the tree type for the result.
14148 This function assumes that you cleared the MPFR flags and then
14149 calculated M to see if anything subsequently set a flag prior to
14150 entering this function. Return NULL_TREE if any checks fail; if
14151 FORCE_CONVERT is true, bypass the checks. */
14152
14153 static tree
14154 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
14155 {
14156 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
14157 overflow/underflow occurred. If -frounding-math, proceed iff the
14158 result of calling FUNC was exact. */
14159 if (force_convert
14160 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
14161 && !mpfr_overflow_p () && !mpfr_underflow_p ()
14162 && (!flag_rounding_math || !inexact)))
14163 {
14164 REAL_VALUE_TYPE re, im;
14165
14166 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
14167 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
14168 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
14169 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
14170 but the mpfr_t is not, then we underflowed in the
14171 conversion. */
14172 if (force_convert
14173 || (real_isfinite (&re) && real_isfinite (&im)
14174 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
14175 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
14176 {
14177 REAL_VALUE_TYPE re_mode, im_mode;
14178
14179 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
14180 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
14181 /* Proceed iff the specified mode can hold the value. */
14182 if (force_convert
14183 || (real_identical (&re_mode, &re)
14184 && real_identical (&im_mode, &im)))
14185 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
14186 build_real (TREE_TYPE (type), im_mode));
14187 }
14188 }
14189 return NULL_TREE;
14190 }
14191
14192 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
14193 the value pointed to by ARG_QUO and return the result. The type is taken
14194 from the type of ARG0 and is used for setting the precision of the
14195 calculation and results. */
14196
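/* For illustration (hypothetical source): with constant arguments,
   a call such as

     r = remquo (5.0, 3.0, &q);

   can be folded so that r becomes -1.0 and q is set to 2, the quotient
   rounded to the nearest integer.  */
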
14197 static tree
14198 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
14199 {
14200 tree const type = TREE_TYPE (arg0);
14201 tree result = NULL_TREE;
14202
14203 STRIP_NOPS (arg0);
14204 STRIP_NOPS (arg1);
14205
14206 /* To proceed, MPFR must exactly represent the target floating point
14207 format, which only happens when the target base equals two. */
14208 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14209 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
14210 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
14211 {
14212 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
14213 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
14214
14215 if (real_isfinite (ra0) && real_isfinite (ra1))
14216 {
14217 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14218 const int prec = fmt->p;
14219 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
14220 tree result_rem;
14221 long integer_quo;
14222 mpfr_t m0, m1;
14223
14224 mpfr_inits2 (prec, m0, m1, NULL);
14225 mpfr_from_real (m0, ra0, MPFR_RNDN);
14226 mpfr_from_real (m1, ra1, MPFR_RNDN);
14227 mpfr_clear_flags ();
14228 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
14229 /* Remquo is independent of the rounding mode, so pass
14230 inexact=0 to do_mpfr_ckconv(). */
14231 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
14232 mpfr_clears (m0, m1, NULL);
14233 if (result_rem)
14234 {
14235 /* MPFR calculates quo in the host's long so it may
14236 return more bits in quo than the target int can hold
14237 if sizeof(host long) > sizeof(target int). This can
14238 happen even for native compilers in LP64 mode. In
14239 these cases, modulo the quo value with the largest
14240 number that the target int can hold while leaving one
14241 bit for the sign. */
14242 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14243 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14244
14245 /* Dereference the quo pointer argument. */
14246 arg_quo = build_fold_indirect_ref (arg_quo);
14247 /* Proceed iff a valid pointer type was passed in. */
14248 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14249 {
14250 /* Set the value. */
14251 tree result_quo
14252 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14253 build_int_cst (TREE_TYPE (arg_quo),
14254 integer_quo));
14255 TREE_SIDE_EFFECTS (result_quo) = 1;
14256 /* Combine the quo assignment with the rem. */
14257 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14258 result_quo, result_rem));
14259 }
14260 }
14261 }
14262 }
14263 return result;
14264 }
14265
14266 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14267 resulting value as a tree with type TYPE. The mpfr precision is
14268 set to the precision of TYPE. We assume that this mpfr function
14269 returns zero if the result could be calculated exactly within the
14270 requested precision. In addition, the integer pointer represented
14271 by ARG_SG will be dereferenced and set to the appropriate signgam
14272 (-1,1) value. */
14273
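/* For illustration (hypothetical source): a constant call such as

     l = lgamma_r (2.0, &sg);

   can be folded so that l becomes 0.0 and *sg is set to 1, since
   gamma(2.0) is exactly 1 and positive.  */
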
14274 static tree
14275 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14276 {
14277 tree result = NULL_TREE;
14278
14279 STRIP_NOPS (arg);
14280
14281 /* To proceed, MPFR must exactly represent the target floating point
14282 format, which only happens when the target base equals two. Also
14283 verify ARG is a constant and that ARG_SG is an int pointer. */
14284 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14285 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14286 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14287 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14288 {
14289 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14290
14291 /* In addition to NaN and Inf, the argument cannot be zero or a
14292 negative integer. */
14293 if (real_isfinite (ra)
14294 && ra->cl != rvc_zero
14295 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14296 {
14297 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14298 const int prec = fmt->p;
14299 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
14300 int inexact, sg;
14301 mpfr_t m;
14302 tree result_lg;
14303
14304 mpfr_init2 (m, prec);
14305 mpfr_from_real (m, ra, MPFR_RNDN);
14306 mpfr_clear_flags ();
14307 inexact = mpfr_lgamma (m, &sg, m, rnd);
14308 result_lg = do_mpfr_ckconv (m, type, inexact);
14309 mpfr_clear (m);
14310 if (result_lg)
14311 {
14312 tree result_sg;
14313
14314 /* Dereference the arg_sg pointer argument. */
14315 arg_sg = build_fold_indirect_ref (arg_sg);
14316 /* Assign the signgam value into *arg_sg. */
14317 result_sg = fold_build2 (MODIFY_EXPR,
14318 TREE_TYPE (arg_sg), arg_sg,
14319 build_int_cst (TREE_TYPE (arg_sg), sg));
14320 TREE_SIDE_EFFECTS (result_sg) = 1;
14321 /* Combine the signgam assignment with the lgamma result. */
14322 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14323 result_sg, result_lg));
14324 }
14325 }
14326 }
14327
14328 return result;
14329 }
14330
14331 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14332 mpc function FUNC on it and return the resulting value as a tree
14333 with type TYPE. The mpfr precision is set to the precision of
14334 TYPE. We assume that function FUNC returns zero if the result
14335 could be calculated exactly within the requested precision. If
14336 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14337 in the arguments and/or results. */
14338
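/* A sketch of typical use by a caller elsewhere in GCC: a constant call
   to cpow can be folded by passing mpc_pow as FUNC, for example

     do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   with DO_NONFINITE left as zero.  */
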
14339 tree
14340 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14341 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14342 {
14343 tree result = NULL_TREE;
14344
14345 STRIP_NOPS (arg0);
14346 STRIP_NOPS (arg1);
14347
14348 /* To proceed, MPFR must exactly represent the target floating point
14349 format, which only happens when the target base equals two. */
14350 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14351 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14352 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14353 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14354 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14355 {
14356 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14357 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14358 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14359 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14360
14361 if (do_nonfinite
14362 || (real_isfinite (re0) && real_isfinite (im0)
14363 && real_isfinite (re1) && real_isfinite (im1)))
14364 {
14365 const struct real_format *const fmt =
14366 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14367 const int prec = fmt->p;
14368 const mpfr_rnd_t rnd = fmt->round_towards_zero
14369 ? MPFR_RNDZ : MPFR_RNDN;
14370 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14371 int inexact;
14372 mpc_t m0, m1;
14373
14374 mpc_init2 (m0, prec);
14375 mpc_init2 (m1, prec);
14376 mpfr_from_real (mpc_realref (m0), re0, rnd);
14377 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14378 mpfr_from_real (mpc_realref (m1), re1, rnd);
14379 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14380 mpfr_clear_flags ();
14381 inexact = func (m0, m0, m1, crnd);
14382 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14383 mpc_clear (m0);
14384 mpc_clear (m1);
14385 }
14386 }
14387
14388 return result;
14389 }
14390
14391 /* A wrapper function for builtin folding that prevents warnings for
14392 "statement without effect" and the like, caused by removing the
14393 call node before the warning is generated. */
14394
14395 tree
14396 fold_call_stmt (gcall *stmt, bool ignore)
14397 {
14398 tree ret = NULL_TREE;
14399 tree fndecl = gimple_call_fndecl (stmt);
14400 location_t loc = gimple_location (stmt);
14401 if (fndecl && fndecl_built_in_p (fndecl)
14402 && !gimple_call_va_arg_pack_p (stmt))
14403 {
14404 int nargs = gimple_call_num_args (stmt);
14405 tree *args = (nargs > 0
14406 ? gimple_call_arg_ptr (stmt, 0)
14407 : &error_mark_node);
14408
14409 if (avoid_folding_inline_builtin (fndecl))
14410 return NULL_TREE;
14411 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14412 {
14413 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14414 }
14415 else
14416 {
14417 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
14418 if (ret)
14419 {
14420 /* Propagate location information from original call to
14421 expansion of builtin. Otherwise things like
14422 maybe_emit_chk_warning, that operate on the expansion
14423 of a builtin, will use the wrong location information. */
14424 if (gimple_has_location (stmt))
14425 {
14426 tree realret = ret;
14427 if (TREE_CODE (ret) == NOP_EXPR)
14428 realret = TREE_OPERAND (ret, 0);
14429 if (CAN_HAVE_LOCATION_P (realret)
14430 && !EXPR_HAS_LOCATION (realret))
14431 SET_EXPR_LOCATION (realret, loc);
14432 return realret;
14433 }
14434 return ret;
14435 }
14436 }
14437 }
14438 return NULL_TREE;
14439 }
14440
14441 /* Look up the function in builtin_decl that corresponds to DECL
14442 and set ASMSPEC as its user assembler name. DECL must be a
14443 function decl that declares a builtin. */
14444
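/* For example (an illustration), a user declaration such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   causes the corresponding builtin to be expanded using the assembler
   name __my_memcpy.  */
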
14445 void
14446 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14447 {
14448 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
14449 && asmspec != 0);
14450
14451 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14452 set_user_assembler_name (builtin, asmspec);
14453
14454 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
14455 && INT_TYPE_SIZE < BITS_PER_WORD)
14456 {
14457 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
14458 set_user_assembler_libfunc ("ffs", asmspec);
14459 set_optab_libfunc (ffs_optab, mode, "ffs");
14460 }
14461 }
14462
14463 /* Return true if DECL is a builtin that expands to a constant or similarly
14464 simple code. */
14465 bool
14466 is_simple_builtin (tree decl)
14467 {
14468 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
14469 switch (DECL_FUNCTION_CODE (decl))
14470 {
14471 /* Builtins that expand to constants. */
14472 case BUILT_IN_CONSTANT_P:
14473 case BUILT_IN_EXPECT:
14474 case BUILT_IN_OBJECT_SIZE:
14475 case BUILT_IN_UNREACHABLE:
14476 /* Simple register moves or loads from stack. */
14477 case BUILT_IN_ASSUME_ALIGNED:
14478 case BUILT_IN_RETURN_ADDRESS:
14479 case BUILT_IN_EXTRACT_RETURN_ADDR:
14480 case BUILT_IN_FROB_RETURN_ADDR:
14481 case BUILT_IN_RETURN:
14482 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14483 case BUILT_IN_FRAME_ADDRESS:
14484 case BUILT_IN_VA_END:
14485 case BUILT_IN_STACK_SAVE:
14486 case BUILT_IN_STACK_RESTORE:
14487 /* Exception state returns or moves registers around. */
14488 case BUILT_IN_EH_FILTER:
14489 case BUILT_IN_EH_POINTER:
14490 case BUILT_IN_EH_COPY_VALUES:
14491 return true;
14492
14493 default:
14494 return false;
14495 }
14496
14497 return false;
14498 }
14499
14500 /* Return true if DECL is a builtin that is not expensive, i.e., it is
14501 most probably expanded inline into reasonably simple code. This is a
14502 superset of is_simple_builtin. */
14503 bool
14504 is_inexpensive_builtin (tree decl)
14505 {
14506 if (!decl)
14507 return false;
14508 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14509 return true;
14510 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14511 switch (DECL_FUNCTION_CODE (decl))
14512 {
14513 case BUILT_IN_ABS:
14514 CASE_BUILT_IN_ALLOCA:
14515 case BUILT_IN_BSWAP16:
14516 case BUILT_IN_BSWAP32:
14517 case BUILT_IN_BSWAP64:
14518 case BUILT_IN_BSWAP128:
14519 case BUILT_IN_CLZ:
14520 case BUILT_IN_CLZIMAX:
14521 case BUILT_IN_CLZL:
14522 case BUILT_IN_CLZLL:
14523 case BUILT_IN_CTZ:
14524 case BUILT_IN_CTZIMAX:
14525 case BUILT_IN_CTZL:
14526 case BUILT_IN_CTZLL:
14527 case BUILT_IN_FFS:
14528 case BUILT_IN_FFSIMAX:
14529 case BUILT_IN_FFSL:
14530 case BUILT_IN_FFSLL:
14531 case BUILT_IN_IMAXABS:
14532 case BUILT_IN_FINITE:
14533 case BUILT_IN_FINITEF:
14534 case BUILT_IN_FINITEL:
14535 case BUILT_IN_FINITED32:
14536 case BUILT_IN_FINITED64:
14537 case BUILT_IN_FINITED128:
14538 case BUILT_IN_FPCLASSIFY:
14539 case BUILT_IN_ISFINITE:
14540 case BUILT_IN_ISINF_SIGN:
14541 case BUILT_IN_ISINF:
14542 case BUILT_IN_ISINFF:
14543 case BUILT_IN_ISINFL:
14544 case BUILT_IN_ISINFD32:
14545 case BUILT_IN_ISINFD64:
14546 case BUILT_IN_ISINFD128:
14547 case BUILT_IN_ISNAN:
14548 case BUILT_IN_ISNANF:
14549 case BUILT_IN_ISNANL:
14550 case BUILT_IN_ISNAND32:
14551 case BUILT_IN_ISNAND64:
14552 case BUILT_IN_ISNAND128:
14553 case BUILT_IN_ISNORMAL:
14554 case BUILT_IN_ISGREATER:
14555 case BUILT_IN_ISGREATEREQUAL:
14556 case BUILT_IN_ISLESS:
14557 case BUILT_IN_ISLESSEQUAL:
14558 case BUILT_IN_ISLESSGREATER:
14559 case BUILT_IN_ISUNORDERED:
14560 case BUILT_IN_VA_ARG_PACK:
14561 case BUILT_IN_VA_ARG_PACK_LEN:
14562 case BUILT_IN_VA_COPY:
14563 case BUILT_IN_TRAP:
14564 case BUILT_IN_SAVEREGS:
14565 case BUILT_IN_POPCOUNTL:
14566 case BUILT_IN_POPCOUNTLL:
14567 case BUILT_IN_POPCOUNTIMAX:
14568 case BUILT_IN_POPCOUNT:
14569 case BUILT_IN_PARITYL:
14570 case BUILT_IN_PARITYLL:
14571 case BUILT_IN_PARITYIMAX:
14572 case BUILT_IN_PARITY:
14573 case BUILT_IN_LABS:
14574 case BUILT_IN_LLABS:
14575 case BUILT_IN_PREFETCH:
14576 case BUILT_IN_ACC_ON_DEVICE:
14577 return true;
14578
14579 default:
14580 return is_simple_builtin (decl);
14581 }
14582
14583 return false;
14584 }
14585
14586 /* Return true if T is a constant and the value cast to a target char
14587 can be represented by a host char.
14588 Store the cast char constant in *P if so. */
14589
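/* Typical use (a sketch): when folding a call such as strchr (s, c)
   with constant C, target_char_cst_p (c, &ch) verifies that the value
   is representable as a host char and extracts it into CH.  */
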
14590 bool
14591 target_char_cst_p (tree t, char *p)
14592 {
14593 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
14594 return false;
14595
14596 *p = (char)tree_to_uhwi (t);
14597 return true;
14598 }
14599
14600 /* Return true if the builtin DECL is implemented in a standard library.
14601 Otherwise return false, which doesn't guarantee it is not (thus the list
14602 of handled builtins below may be incomplete). */
14603
14604 bool
14605 builtin_with_linkage_p (tree decl)
14606 {
14607 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14608 switch (DECL_FUNCTION_CODE (decl))
14609 {
14610 CASE_FLT_FN (BUILT_IN_ACOS):
14611 CASE_FLT_FN (BUILT_IN_ACOSH):
14612 CASE_FLT_FN (BUILT_IN_ASIN):
14613 CASE_FLT_FN (BUILT_IN_ASINH):
14614 CASE_FLT_FN (BUILT_IN_ATAN):
14615 CASE_FLT_FN (BUILT_IN_ATANH):
14616 CASE_FLT_FN (BUILT_IN_ATAN2):
14617 CASE_FLT_FN (BUILT_IN_CBRT):
14618 CASE_FLT_FN (BUILT_IN_CEIL):
14619 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
14620 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14621 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
14622 CASE_FLT_FN (BUILT_IN_COS):
14623 CASE_FLT_FN (BUILT_IN_COSH):
14624 CASE_FLT_FN (BUILT_IN_ERF):
14625 CASE_FLT_FN (BUILT_IN_ERFC):
14626 CASE_FLT_FN (BUILT_IN_EXP):
14627 CASE_FLT_FN (BUILT_IN_EXP2):
14628 CASE_FLT_FN (BUILT_IN_EXPM1):
14629 CASE_FLT_FN (BUILT_IN_FABS):
14630 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
14631 CASE_FLT_FN (BUILT_IN_FDIM):
14632 CASE_FLT_FN (BUILT_IN_FLOOR):
14633 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
14634 CASE_FLT_FN (BUILT_IN_FMA):
14635 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
14636 CASE_FLT_FN (BUILT_IN_FMAX):
14637 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
14638 CASE_FLT_FN (BUILT_IN_FMIN):
14639 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
14640 CASE_FLT_FN (BUILT_IN_FMOD):
14641 CASE_FLT_FN (BUILT_IN_FREXP):
14642 CASE_FLT_FN (BUILT_IN_HYPOT):
14643 CASE_FLT_FN (BUILT_IN_ILOGB):
14644 CASE_FLT_FN (BUILT_IN_LDEXP):
14645 CASE_FLT_FN (BUILT_IN_LGAMMA):
14646 CASE_FLT_FN (BUILT_IN_LLRINT):
14647 CASE_FLT_FN (BUILT_IN_LLROUND):
14648 CASE_FLT_FN (BUILT_IN_LOG):
14649 CASE_FLT_FN (BUILT_IN_LOG10):
14650 CASE_FLT_FN (BUILT_IN_LOG1P):
14651 CASE_FLT_FN (BUILT_IN_LOG2):
14652 CASE_FLT_FN (BUILT_IN_LOGB):
14653 CASE_FLT_FN (BUILT_IN_LRINT):
14654 CASE_FLT_FN (BUILT_IN_LROUND):
14655 CASE_FLT_FN (BUILT_IN_MODF):
14656 CASE_FLT_FN (BUILT_IN_NAN):
14657 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14658 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
14659 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
14660 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
14661 CASE_FLT_FN (BUILT_IN_POW):
14662 CASE_FLT_FN (BUILT_IN_REMAINDER):
14663 CASE_FLT_FN (BUILT_IN_REMQUO):
14664 CASE_FLT_FN (BUILT_IN_RINT):
14665 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
14666 CASE_FLT_FN (BUILT_IN_ROUND):
14667 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
14668 CASE_FLT_FN (BUILT_IN_SCALBLN):
14669 CASE_FLT_FN (BUILT_IN_SCALBN):
14670 CASE_FLT_FN (BUILT_IN_SIN):
14671 CASE_FLT_FN (BUILT_IN_SINH):
14672 CASE_FLT_FN (BUILT_IN_SINCOS):
14673 CASE_FLT_FN (BUILT_IN_SQRT):
14674 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
14675 CASE_FLT_FN (BUILT_IN_TAN):
14676 CASE_FLT_FN (BUILT_IN_TANH):
14677 CASE_FLT_FN (BUILT_IN_TGAMMA):
14678 CASE_FLT_FN (BUILT_IN_TRUNC):
14679 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
14680 return true;
14681
14682 case BUILT_IN_STPCPY:
14683 case BUILT_IN_STPNCPY:
14684 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
14685 by libiberty's stpcpy.c for MinGW targets so we need to return true
14686 in order to be able to build libiberty in LTO mode for them. */
14687 return true;
14688
14689 default:
14690 break;
14691 }
14692 return false;
14693 }
14694
14695 /* Return true if OFFRNG is bounded to a subrange of offset values
14696 valid for the largest possible object. */
14697
14698 bool
14699 access_ref::offset_bounded () const
14700 {
14701 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
14702 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
14703 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
14704 }
14705
14706 /* Return the fnspec string describing the known side effects of the
14707 builtin CALLEE; the encoding is described in attr-fnspec.h. See
14708 tree-ssa-structalias.c:find_func_aliases for builtins we might need to handle here. */
14709
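/* As an example of reading one of the strings returned below (the full
   encoding is documented in attr-fnspec.h): for strcmp the spec ".cR R "
   says nothing is known about the return value ('.'), the function is
   const apart from the described argument effects ('c'), and both
   pointer arguments are only read ('R').  */
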
14710 attr_fnspec
14711 builtin_fnspec (tree callee)
14712 {
14713 built_in_function code = DECL_FUNCTION_CODE (callee);
14714
14715 switch (code)
14716 {
14717 /* All the following functions read memory pointed to by
14718 their second argument and write memory pointed to by their first
14719 argument.
14720 strcat/strncat additionally read memory pointed to by the first
14721 argument. */
14722 case BUILT_IN_STRCAT:
14723 case BUILT_IN_STRCAT_CHK:
14724 return "1cW 1 ";
14725 case BUILT_IN_STRNCAT:
14726 case BUILT_IN_STRNCAT_CHK:
14727 return "1cW 13";
14728 case BUILT_IN_STRCPY:
14729 case BUILT_IN_STRCPY_CHK:
14730 return "1cO 1 ";
14731 case BUILT_IN_STPCPY:
14732 case BUILT_IN_STPCPY_CHK:
14733 return ".cO 1 ";
14734 case BUILT_IN_STRNCPY:
14735 case BUILT_IN_MEMCPY:
14736 case BUILT_IN_MEMMOVE:
14737 case BUILT_IN_TM_MEMCPY:
14738 case BUILT_IN_TM_MEMMOVE:
14739 case BUILT_IN_STRNCPY_CHK:
14740 case BUILT_IN_MEMCPY_CHK:
14741 case BUILT_IN_MEMMOVE_CHK:
14742 return "1cO313";
14743 case BUILT_IN_MEMPCPY:
14744 case BUILT_IN_MEMPCPY_CHK:
14745 return ".cO313";
14746 case BUILT_IN_STPNCPY:
14747 case BUILT_IN_STPNCPY_CHK:
14748 return ".cO313";
14749 case BUILT_IN_BCOPY:
14750 return ".c23O3";
14751 case BUILT_IN_BZERO:
14752 return ".cO2";
14753 case BUILT_IN_MEMCMP:
14754 case BUILT_IN_MEMCMP_EQ:
14755 case BUILT_IN_BCMP:
14756 case BUILT_IN_STRNCMP:
14757 case BUILT_IN_STRNCMP_EQ:
14758 case BUILT_IN_STRNCASECMP:
14759 return ".cR3R3";
14760
14761 /* The following functions read memory pointed to by their
14762 first argument. */
14763 CASE_BUILT_IN_TM_LOAD (1):
14764 CASE_BUILT_IN_TM_LOAD (2):
14765 CASE_BUILT_IN_TM_LOAD (4):
14766 CASE_BUILT_IN_TM_LOAD (8):
14767 CASE_BUILT_IN_TM_LOAD (FLOAT):
14768 CASE_BUILT_IN_TM_LOAD (DOUBLE):
14769 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
14770 CASE_BUILT_IN_TM_LOAD (M64):
14771 CASE_BUILT_IN_TM_LOAD (M128):
14772 CASE_BUILT_IN_TM_LOAD (M256):
14773 case BUILT_IN_TM_LOG:
14774 case BUILT_IN_TM_LOG_1:
14775 case BUILT_IN_TM_LOG_2:
14776 case BUILT_IN_TM_LOG_4:
14777 case BUILT_IN_TM_LOG_8:
14778 case BUILT_IN_TM_LOG_FLOAT:
14779 case BUILT_IN_TM_LOG_DOUBLE:
14780 case BUILT_IN_TM_LOG_LDOUBLE:
14781 case BUILT_IN_TM_LOG_M64:
14782 case BUILT_IN_TM_LOG_M128:
14783 case BUILT_IN_TM_LOG_M256:
14784 return ".cR ";
14785
14786 case BUILT_IN_INDEX:
14787 case BUILT_IN_RINDEX:
14788 case BUILT_IN_STRCHR:
14789 case BUILT_IN_STRLEN:
14790 case BUILT_IN_STRRCHR:
14791 return ".cR ";
14792 case BUILT_IN_STRNLEN:
14793 return ".cR2";
14794
14795 /* These read memory pointed to by the first argument.
14796 Allocating memory does not have any side-effects apart from
14797 being the definition point for the pointer.
14798 Unix98 specifies that errno is set on allocation failure. */
14799 case BUILT_IN_STRDUP:
14800 return "mCR ";
14801 case BUILT_IN_STRNDUP:
14802 return "mCR2";
14803 /* Allocating memory does not have any side-effects apart from
14804 being the definition point for the pointer. */
14805 case BUILT_IN_MALLOC:
14806 case BUILT_IN_ALIGNED_ALLOC:
14807 case BUILT_IN_CALLOC:
14808 case BUILT_IN_GOMP_ALLOC:
14809 return "mC";
14810 CASE_BUILT_IN_ALLOCA:
14811 return "mc";
14812 /* These read memory pointed to by the first argument with size
14813 in the third argument. */
14814 case BUILT_IN_MEMCHR:
14815 return ".cR3";
14816 /* These read memory pointed to by the first and second arguments. */
14817 case BUILT_IN_STRSTR:
14818 case BUILT_IN_STRPBRK:
14819 case BUILT_IN_STRCASECMP:
14820 case BUILT_IN_STRCSPN:
14821 case BUILT_IN_STRSPN:
14822 case BUILT_IN_STRCMP:
14823 case BUILT_IN_STRCMP_EQ:
14824 return ".cR R ";
14825 /* Freeing memory kills the pointed-to memory. More importantly,
14826 the call has to serve as a barrier for moving loads and stores
14827 across it. */
14828 case BUILT_IN_STACK_RESTORE:
14829 case BUILT_IN_FREE:
14830 case BUILT_IN_GOMP_FREE:
14831 return ".co ";
14832 case BUILT_IN_VA_END:
14833 return ".cO ";
14834 /* Realloc serves both as allocation point and deallocation point. */
14835 case BUILT_IN_REALLOC:
14836 return ".Cw ";
14837 case BUILT_IN_GAMMA_R:
14838 case BUILT_IN_GAMMAF_R:
14839 case BUILT_IN_GAMMAL_R:
14840 case BUILT_IN_LGAMMA_R:
14841 case BUILT_IN_LGAMMAF_R:
14842 case BUILT_IN_LGAMMAL_R:
14843 return ".C. Ot";
14844 case BUILT_IN_FREXP:
14845 case BUILT_IN_FREXPF:
14846 case BUILT_IN_FREXPL:
14847 case BUILT_IN_MODF:
14848 case BUILT_IN_MODFF:
14849 case BUILT_IN_MODFL:
14850 return ".c. Ot";
14851 case BUILT_IN_REMQUO:
14852 case BUILT_IN_REMQUOF:
14853 case BUILT_IN_REMQUOL:
14854 return ".c. . Ot";
14855 case BUILT_IN_SINCOS:
14856 case BUILT_IN_SINCOSF:
14857 case BUILT_IN_SINCOSL:
14858 return ".c. OtOt";
14859 case BUILT_IN_MEMSET:
14860 case BUILT_IN_MEMSET_CHK:
14861 case BUILT_IN_TM_MEMSET:
14862 return "1cO3";
14863 CASE_BUILT_IN_TM_STORE (1):
14864 CASE_BUILT_IN_TM_STORE (2):
14865 CASE_BUILT_IN_TM_STORE (4):
14866 CASE_BUILT_IN_TM_STORE (8):
14867 CASE_BUILT_IN_TM_STORE (FLOAT):
14868 CASE_BUILT_IN_TM_STORE (DOUBLE):
14869 CASE_BUILT_IN_TM_STORE (LDOUBLE):
14870 CASE_BUILT_IN_TM_STORE (M64):
14871 CASE_BUILT_IN_TM_STORE (M128):
14872 CASE_BUILT_IN_TM_STORE (M256):
14873 return ".cO ";
14874 case BUILT_IN_STACK_SAVE:
14875 return ".c";
14876 case BUILT_IN_ASSUME_ALIGNED:
14877 return "1cX ";
14878 /* But posix_memalign stores a pointer into the memory pointed to
14879 by its first argument. */
14880 case BUILT_IN_POSIX_MEMALIGN:
14881 return ".cOt";
14882
14883 default:
14884 return "";
14885 }
14886 }