]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/pointer-query.cc
c++: Implement P0466R5 __cpp_lib_is_layout_compatible compiler helpers [PR101539]
[thirdparty/gcc.git] / gcc / pointer-query.cc
CommitLineData
2a837de2
MS
1/* Definitions of the pointer_query and related classes.
2
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "target.h"
26#include "rtl.h"
27#include "tree.h"
28#include "memmodel.h"
29#include "gimple.h"
30#include "predict.h"
31#include "tm_p.h"
32#include "stringpool.h"
33#include "tree-vrp.h"
34#include "tree-ssanames.h"
35#include "expmed.h"
36#include "optabs.h"
37#include "emit-rtl.h"
38#include "recog.h"
39#include "diagnostic-core.h"
40#include "alias.h"
41#include "fold-const.h"
42#include "fold-const-call.h"
43#include "gimple-ssa-warn-restrict.h"
44#include "stor-layout.h"
45#include "calls.h"
46#include "varasm.h"
47#include "tree-object-size.h"
48#include "tree-ssa-strlen.h"
49#include "realmpfr.h"
50#include "cfgrtl.h"
51#include "except.h"
52#include "dojump.h"
53#include "explow.h"
54#include "stmt.h"
55#include "expr.h"
56#include "libfuncs.h"
57#include "output.h"
58#include "typeclass.h"
59#include "langhooks.h"
60#include "value-prof.h"
61#include "builtins.h"
62#include "stringpool.h"
63#include "attribs.h"
64#include "asan.h"
65#include "internal-fn.h"
66#include "case-cfn-macros.h"
67#include "gimple-fold.h"
68#include "intl.h"
69#include "tree-dfa.h"
70#include "gimple-iterator.h"
71#include "gimple-ssa.h"
72#include "tree-ssa-live.h"
73#include "tree-outof-ssa.h"
74#include "attr-fnspec.h"
75#include "gimple-range.h"
76
77#include "pointer-query.h"
78
/* Forward declaration of the recursive worker used by compute_objsize
   and access_ref::get_ref below; threads the SSA recursion limit and
   the pointer_query cache through the traversal.  */
static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
			       pointer_query *);
81
/* Wrapper around the wide_int overload of get_range that accepts
   offset_int instead.  For middle end expressions returns the same
   result.  For a subset of nonconstant expressions emitted by the front
   end determines a more precise range than would be possible otherwise. */
86
87static bool
88get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
89{
90 offset_int add = 0;
91 if (TREE_CODE (x) == PLUS_EXPR)
92 {
93 /* Handle constant offsets in pointer addition expressions seen
94 n the front end IL. */
95 tree op = TREE_OPERAND (x, 1);
96 if (TREE_CODE (op) == INTEGER_CST)
97 {
98 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
99 add = wi::to_offset (op);
100 x = TREE_OPERAND (x, 0);
101 }
102 }
103
104 if (TREE_CODE (x) == NOP_EXPR)
105 /* Also handle conversions to sizetype seen in the front end IL. */
106 x = TREE_OPERAND (x, 0);
107
108 tree type = TREE_TYPE (x);
109 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
110 return false;
111
112 if (TREE_CODE (x) != INTEGER_CST
113 && TREE_CODE (x) != SSA_NAME)
114 {
115 if (TYPE_UNSIGNED (type)
116 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
117 type = signed_type_for (type);
118
119 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
120 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
121 return x;
122 }
123
124 wide_int wr[2];
125 if (!get_range (x, stmt, wr, rvals))
126 return false;
127
128 signop sgn = SIGNED;
129 /* Only convert signed integers or unsigned sizetype to a signed
130 offset and avoid converting large positive values in narrower
131 types to negative offsets. */
132 if (TYPE_UNSIGNED (type)
133 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
134 sgn = UNSIGNED;
135
136 r[0] = offset_int::from (wr[0], sgn);
137 r[1] = offset_int::from (wr[1], sgn);
138 return true;
139}
140
141/* Return the argument that the call STMT to a built-in function returns
142 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
143 from the argument reflected in the value returned by the built-in if it
144 can be determined, otherwise to 0 and HWI_M1U respectively. Set
145 *PAST_END for functions like mempcpy that might return a past the end
146 pointer (most functions return a dereferenceable pointer to an existing
147 element of an array). */
148
static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  range_query *rvals)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.
	 The "Pv" suffix selects the placement forms taking a void*.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")          // ordinary form
	  && !id_equal (fname, "_ZnwmPv")       // ordinary form
	  && !id_equal (fname, "_ZnajPv")       // array form
	  && !id_equal (fname, "_ZnamPv"))      // array form
	return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Placement new returns its second (pointer) argument with
	 a zero offset.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize (src, 1, &aref, rvals)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, rvals))
	  /* On success the result points at most at the last byte
	     searched, hence one less than the size bound.  */
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result points somewhere into the first argument; with
	 no length bound available use the maximal offset range.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* stpcpy returns a pointer to the terminating nul, so the
	   offset is bounded by the source size less one.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize (src, 1, &aref, rvals))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize (src, 1, &aref, rvals)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpcpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
313
314/* If STMT is a call to an allocation function, returns the constant
315 maximum size of the object allocated by the call represented as
316 sizetype. If nonnull, sets RNG1[] to the range of the size.
317 When nonnull, uses RVALS for range information, otherwise gets global
318 range info.
319 Returns null when STMT is not a call to a valid allocation function. */
320
tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query * /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  /* Look up the callee's type: prefer the declaration's type and fall
     back on the call's function type for indirect calls.  */
  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* Zero-based indices of the size argument(s); UINT_MAX means unset.  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      /* Without attribute alloc_size only __builtin_alloca_with_align
	 is recognized; its size is the first argument.  */
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Extract the argument position(s) from the attribute; they are
	 1-based in the attribute, hence the -1 adjustment.  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With a single constant size argument there is nothing to multiply;
     return it converted to sizetype.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  /* Cap the result at SIZE_MAX; the caller still sees the capped
     upper bound in RNG1[1].  */
  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
417
/* For an access to an object referenced by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current
   function.  Set STATIC_ARRAY if the array parameter has been declared
   [static].  Return the function parameter on success and null
   otherwise. */
423
tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  /* Look up the access attribute information for the parameter.  */
  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* A size described by a variable argument isn't usable here.
	 TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
473
/* Construct an access_ref.  BOUND, if nonnull, is the expression bounding
   the size of the access (e.g., the size argument to a string function);
   MINACCESS is set when at least one byte must be accessed.  */

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      /* Reduce the lower bound to 1 for a nonzero minimum access,
	 and to zero otherwise (note: this overwrites the value just
	 extracted from RNG[0] above).  */
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
500
501/* Return the PHI node REF refers to or null if it doesn't. */
502
503gphi *
504access_ref::phi () const
505{
506 if (!ref || TREE_CODE (ref) != SSA_NAME)
507 return NULL;
508
509 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
510 if (gimple_code (def_stmt) != GIMPLE_PHI)
511 return NULL;
512
513 return as_a <gphi *> (def_stmt);
514}
515
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each
   PHI argument ARG. */
520
tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* Reflects the range of offsets of all PHI arguments refer to the same
     object (i.e., have the same REF).  */
  access_ref same_ref;
  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  access_ref phi_ref;
  if (pref)
    {
      phi_ref = *pref;
      same_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  /* The offset of the PHI, not reflecting those of its arguments.  */
  const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.
	   NOTE(review): this return skips psnlim->leave_phi (REF),
	   leaving the PHI marked visited — confirm that's intended.  */
	return NULL_TREE;

      /* Add PREF's offset to that of the argument.  */
      phi_arg_ref.add_offset (orng[0], orng[1]);
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      /* True unless the argument's size is the "don't know" range
	 [0, maxobjsize].  */
      const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				   || phi_arg_ref.sizrng[1] != maxobjsize);

      parmarray |= phi_arg_ref.parmarray;

      /* A null argument is only disregarded when the PHI has other
	 arguments to fall back on.  */
      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  /* PHI_REF is still invalid (first meaningful argument);
	     initialize it from this argument.  */
	  if (!nullp)
	    same_ref = phi_arg_ref;
	  phi_ref = phi_arg_ref;
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];
	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      if (phi_arg_ref.ref != same_ref.ref)
	same_ref.ref = NULL_TREE;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;

      /* Set SAME_REF.OFFRNG to the maximum range of all arguments.  */
      if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
	same_ref.offrng[0] = phi_arg_ref.offrng[0];
      if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
	same_ref.offrng[1] = phi_arg_ref.offrng[1];
    }

  if (!same_ref.ref && same_ref.offrng[0] != 0)
    /* Clear BASE0 if not all the arguments refer to the same object and
       if not all their offsets are zero-based.  This allows the final
       PHI offset to be out of bounds for some arguments but not for
       others (or negative even if all the arguments are BASE0), which
       is overly permissive.  */
    phi_ref.base0 = false;

  if (same_ref.ref)
    phi_ref = same_ref;
  else
    {
      /* Replace the lower bound of the largest argument with the size
	 of the smallest argument, and set PARMARRAY if any argument
	 was one.  */
      phi_ref.sizrng[0] = minsize;
      phi_ref.parmarray = parmarray;
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
675
676/* Return the maximum amount of space remaining and if non-null, set
677 argument to the minimum. */
678
679offset_int
680access_ref::size_remaining (offset_int *pmin /* = NULL */) const
681{
682 offset_int minbuf;
683 if (!pmin)
684 pmin = &minbuf;
685
686 /* add_offset() ensures the offset range isn't inverted. */
687 gcc_checking_assert (offrng[0] <= offrng[1]);
688
689 if (base0)
690 {
691 /* The offset into referenced object is zero-based (i.e., it's
692 not referenced by a pointer into middle of some unknown object). */
693 if (offrng[0] < 0 && offrng[1] < 0)
694 {
695 /* If the offset is negative the remaining size is zero. */
696 *pmin = 0;
697 return 0;
698 }
699
700 if (sizrng[1] <= offrng[0])
701 {
702 /* If the starting offset is greater than or equal to the upper
703 bound on the size of the object, the space remaining is zero.
704 As a special case, if it's equal, set *PMIN to -1 to let
705 the caller know the offset is valid and just past the end. */
706 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
707 return 0;
708 }
709
710 /* Otherwise return the size minus the lower bound of the offset. */
711 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
712
713 *pmin = sizrng[0] - or0;
714 return sizrng[1] - or0;
715 }
716
717 /* The offset to the referenced object isn't zero-based (i.e., it may
718 refer to a byte other than the first. The size of such an object
719 is constrained only by the size of the address space (the result
720 of max_object_size()). */
721 if (sizrng[1] <= offrng[0])
722 {
723 *pmin = 0;
724 return 0;
725 }
726
727 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
728
729 *pmin = sizrng[0] - or0;
730 return sizrng[1] - or0;
731}
732
733/* Return true if the offset and object size are in range for SIZE. */
734
735bool
736access_ref::offset_in_range (const offset_int &size) const
737{
738 if (size_remaining () < size)
739 return false;
740
741 if (base0)
742 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
743
744 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
745 return offmax[0] > -maxoff && offmax[1] < maxoff;
746}
747
748/* Add the range [MIN, MAX] to the offset range. For known objects (with
749 zero-based offsets) at least one of whose offset's bounds is in range,
750 constrain the other (or both) to the bounds of the object (i.e., zero
751 and the upper bound of its size). This improves the quality of
752 diagnostics. */
753
void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Set the minimum and maximum computed so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds...  */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
828
/* Issue one inform message describing each target of an access REF.
   MODE determines whether the access is described as a write, a read,
   or neither in particular. */
831
void
access_ref::inform_access (access_mode mode) const
{
  const access_ref &aref = *this;
  if (!aref.ref)
    return;

  if (aref.phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
	 with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref))
	return;

      /* Except for MAXREF, the rest of the arguments' offsets need not
	 reflect one added to the PHI itself.  Determine the latter from
	 MAXREF on which the result is based.  */
      const offset_int orng[] =
	{
	  offrng[0] - maxref.offrng[0],
	  wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
	};

      /* Add the final PHI's offset to that of each of the arguments
	 and recurse to issue an inform message for it.  */
      for (unsigned i = 0; i != all_refs.length (); ++i)
	{
	  /* Skip any PHIs; those could lead to infinite recursion.  */
	  if (all_refs[i].phi ())
	    continue;

	  all_refs[i].add_offset (orng[0], orng[1]);
	  all_refs[i].inform_access (mode);
	}
      return;
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  /* Format the offset only when it adds information: a zero offset
     into an object at least as large as MAXOFF is left out.  */
  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (is_gimple_call (stmt))
	{
	  loc = gimple_location (stmt);
	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	    {
	      /* Strip the SSA_NAME suffix from the variable name and
		 recreate an identifier with the VLA's original name.  */
	      ref = gimple_call_lhs (stmt);
	      if (SSA_NAME_IDENTIFIER (ref))
		{
		  ref = SSA_NAME_IDENTIFIER (ref);
		  const char *id = IDENTIFIER_POINTER (ref);
		  size_t len = strcspn (id, ".$");
		  if (!len)
		    len = strlen (id);
		  ref = get_identifier_with_length (id, len);
		}
	    }
	  else
	    {
	      /* Except for VLAs, retrieve the allocation function.  */
	      allocfn = gimple_call_fndecl (stmt);
	      if (!allocfn)
		allocfn = gimple_call_fn (stmt);
	      if (TREE_CODE (allocfn) == SSA_NAME)
		{
		  /* For an ALLOC_CALL via a function pointer make a small
		     effort to determine the destination of the pointer.  */
		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
		  if (gimple_assign_single_p (def))
		    {
		      tree rhs = gimple_assign_rhs1 (def);
		      if (DECL_P (rhs))
			allocfn = rhs;
		      else if (TREE_CODE (rhs) == COMPONENT_REF)
			allocfn = TREE_OPERAND (rhs, 1);
		    }
		}
	    }
	}
      else if (gimple_nop_p (stmt))
	/* Handle DECL_PARM below.  */
	ref = SSA_NAME_VAR (ref);
    }

  /* Point the note at the referenced object's declaration or expression
     when a location is available; otherwise give up unless REF is an
     identifier or SSA name (for which the call location set above is
     used).  */
  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
	   && TREE_CODE (ref) != SSA_NAME)
    return;

  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into destination object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "destination object %qE of size %s", ref, sizestr);
	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into destination object of size %s "
		"allocated by %qE", offstr, sizestr, allocfn);
      else
	inform (loc, "destination object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  if (mode == access_read_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into source object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "source object %qE of size %s", ref, sizestr);

	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into source object of size %s allocated by %qE",
		offstr, sizestr, allocfn);
      else
	inform (loc, "source object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  /* Remaining modes: describe the object without a direction.  */
  if (allocfn == NULL_TREE)
    {
      if (*offstr)
	inform (loc, "at offset %s into object %qE of size %s",
		offstr, ref, sizestr);
      else
	inform (loc, "object %qE of size %s", ref, sizestr);

      return;
    }

  if (*offstr)
    inform (loc,
	    "at offset %s into object of size %s allocated by %qE",
	    offstr, sizestr, allocfn);
  else
    inform (loc, "object of size %s allocated by %qE",
	    sizestr, allocfn);
}
1041
1042/* Set a bit for the PHI in VISITED and return true if it wasn't
1043 already set. */
1044
1045bool
1046ssa_name_limit_t::visit_phi (tree ssa_name)
1047{
1048 if (!visited)
1049 visited = BITMAP_ALLOC (NULL);
1050
1051 /* Return false if SSA_NAME has already been visited. */
1052 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1053}
1054
/* Clear the bit for the PHI's SSA_NAME in VISITED so the PHI may
   be considered again along another path.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Remove SSA_NAME from the set of visited PHIs.  VISITED must have
     been allocated by a prior call to visit_phi for this name.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1063
/* Return false if the SSA_NAME chain length counter has reached
   the limit, otherwise decrement the counter and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return false to let the caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}
1078
1079/* If the SSA_NAME has already been "seen" return a positive value.
1080 Otherwise add it to VISITED. If the SSA_NAME limit has been
1081 reached, return a negative value. Otherwise return zero. */
1082
1083int
1084ssa_name_limit_t::next_phi (tree ssa_name)
1085{
1086 {
1087 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1088 /* Return a positive value if the PHI has already been visited. */
1089 if (gimple_code (def_stmt) == GIMPLE_PHI
1090 && !visit_phi (ssa_name))
1091 return 1;
1092 }
1093
1094 /* Return a negative value to let caller avoid recursing beyond
1095 the specified limit. */
1096 if (ssa_def_max == 0)
1097 return -1;
1098
1099 --ssa_def_max;
1100
1101 return 0;
1102}
1103
/* Release the bitmap of visited PHIs, if one was allocated.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
1109
/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  All statistics counters
   (hits, misses, failures, depth, max_depth) start at zero.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  */
}
1120
1121/* Return a pointer to the cached access_ref instance for the SSA_NAME
1122 PTR if it's there or null otherwise. */
1123
1124const access_ref *
1125pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1126{
1127 if (!var_cache)
1128 {
1129 ++misses;
1130 return NULL;
1131 }
1132
1133 unsigned version = SSA_NAME_VERSION (ptr);
1134 unsigned idx = version << 1 | (ostype & 1);
1135 if (var_cache->indices.length () <= idx)
1136 {
1137 ++misses;
1138 return NULL;
1139 }
1140
1141 unsigned cache_idx = var_cache->indices[idx];
1142 if (var_cache->access_refs.length () <= cache_idx)
1143 {
1144 ++misses;
1145 return NULL;
1146 }
1147
1148 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1149 if (cache_ref.ref)
1150 {
1151 ++hits;
1152 return &cache_ref;
1153 }
1154
1155 ++misses;
1156 return NULL;
1157}
1158
/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.
   Return true on success, false when the size of the object referenced
   by PTR couldn't be determined.  */

bool
pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
{
  /* Only SSA_NAMEs are cached; any other tree gets VERSION zero and
     bypasses the cache lookup below.  */
  const unsigned version
    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;

  if (var_cache && version)
    {
      /* The low bit of the index distinguishes the two cached object
	 size types (raw vs. subobject).  */
      unsigned idx = version << 1 | (ostype & 1);
      if (idx < var_cache->indices.length ())
	{
	  /* INDICES stores position + 1; zero means no entry, and
	     0 - 1 wraps around and fails the bounds check below.  */
	  unsigned cache_idx = var_cache->indices[idx] - 1;
	  if (cache_idx < var_cache->access_refs.length ()
	      && var_cache->access_refs[cache_idx].ref)
	    {
	      ++hits;
	      *pref = var_cache->access_refs[cache_idx];
	      return true;
	    }
	}

      ++misses;
    }

  /* Not cached: compute it (compute_objsize inserts intermediate
     results into the cache via put_ref as it recurses).  */
  if (!compute_objsize (ptr, ostype, pref, this))
    {
      ++failures;
      return false;
    }

  return true;
}
1194
1195/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1196 nonnull. */
1197
1198void
1199pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1200{
1201 /* Only add populated/valid entries. */
1202 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1203 return;
1204
1205 /* Add REF to the two-level cache. */
1206 unsigned version = SSA_NAME_VERSION (ptr);
1207 unsigned idx = version << 1 | (ostype & 1);
1208
1209 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1210 Its value minus one is the index into ACCESS_REFS. Not all
1211 entries are valid. */
1212 if (var_cache->indices.length () <= idx)
1213 var_cache->indices.safe_grow_cleared (idx + 1);
1214
1215 if (!var_cache->indices[idx])
1216 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1217
1218 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1219 REF member is nonnull. All entries except for the last two
1220 are valid. Once nonnull, the REF value must stay unchanged. */
1221 unsigned cache_idx = var_cache->indices[idx];
1222 if (var_cache->access_refs.length () <= cache_idx)
1223 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1224
1225 access_ref cache_ref = var_cache->access_refs[cache_idx - 1];
1226 if (cache_ref.ref)
1227 {
1228 gcc_checking_assert (cache_ref.ref == ref.ref);
1229 return;
1230 }
1231
1232 cache_ref = ref;
1233}
1234
1235/* Flush the cache if it's nonnull. */
1236
1237void
1238pointer_query::flush_cache ()
1239{
1240 if (!var_cache)
1241 return;
1242 var_cache->indices.release ();
1243 var_cache->access_refs.release ();
1244}
1245
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   sets *PREF to the reference with more (for MAX_EXPR) or less (for
   MIN_EXPR) space remaining and returns true; returns false when neither
   operand's referenced object can be identified.  */

static bool
handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  tree_code code = gimple_assign_rhs_code (stmt);

  tree ptr = gimple_assign_rhs1 (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
    {
      /* First operand unknown: pessimize it to an unbounded reference
	 so only the second operand's result can be used below.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  ptr = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  /* I0 is the index of the operand whose result is (tentatively)
     copied to *PREF; I1 = !I0 is the other one.  */
  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remainng, whichever
	     is appopriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];
	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      /* NOTE(review): when only AREF[1] was determined, I0 is false
	 and this copies the offset range of the *failed* operand
	 (reset to [0, MAX] above) into *PREF — confirm this widening
	 is intended rather than the reverse assignment.  */
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }
  return true;
}
1323
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
   on success and false on failure.  On success *PREF describes the
   accessed object with the array-index offset folded in.  */

static bool
handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
		  ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  /* Count the dereference implied by the subscript.  */
  ++pref->deref;

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Hande pointers to arrays
       of known bound.  */
    return false;

  /* Determine the size of the whole enclosing object first.  */
  if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte
     offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain
	 (normally zero but 1 in Fortran).  NOTE(review): a low bound
	 that doesn't fit an unsigned HWI (e.g., negative) is silently
	 ignored here — confirm that's intended.  */
      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* Variable-size element: the byte offset can't be computed;
	 fall back on the maximum offset.  */
      pref->add_max_offset ();
      return true;
    }

  /* Scale the index range by the element size in bytes.  */
  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten (never widen) the size bounds determined for
	 the enclosing object.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1403
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  On success the
   MEM_REF's constant byte offset is folded into *PREF.  */

static bool
handle_mem_ref (tree mref, int ostype, access_ref *pref,
		ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  /* Count the dereference implied by the MEM_REF.  */
  ++pref->deref;

  if (VECTOR_TYPE_P (TREE_TYPE (mref)))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
	 objects; those may be synthesized from multiple assignments
	 to consecutive data members (see PR 93200 and 96963).
	 FIXME: Vectorized assignments should only be present after
	 vectorization so this hack is only necessary after it has
	 run and could be avoided in calls from prior passes (e.g.,
	 tree-ssa-strlen.c).
	 FIXME: Deal with this more generally, e.g., by marking up
	 such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  /* Determine the size of the object the MEM_REF's base points to.  */
  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
    return false;

  /* Operand 1 is the (constant) byte offset; fold its range into
     the overall offset.  */
  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1446
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, int ostype, access_ref *pref,
		   ssa_name_limit_t &snlim, pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* Strip an outer ADDR_EXPR and remember that we did: taking the
     address undoes one level of dereference.  */
  const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
  if (addr)
    {
      --pref->deref;
      ptr = TREE_OPERAND (ptr, 0);
    }

  /* Declarations: the size is the declared/initialized size when
     it's a constant, otherwise the maximum.  */
  if (DECL_P (ptr))
    {
      pref->ref = ptr;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Set the maximum size if the reference is to the pointer
	     itself (as opposed to what it points to), and clear
	     BASE0 since the offset isn't necessarily zero-based.  */
	  pref->set_max_size_range ();
	  pref->base0 = false;
	  return true;
	}

      if (tree size = decl_init_size (ptr, false))
	if (TREE_CODE (size) == INTEGER_CST)
	  {
	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
	    return true;
	  }

      pref->set_max_size_range ();
      return true;
    }

  const tree_code code = TREE_CODE (ptr);
  range_query *const rvals = qry ? qry->rvals : NULL;

  if (code == BIT_FIELD_REF)
    {
      /* Size of the enclosing object plus the bit offset converted
	 to bytes.  */
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
      pref->add_offset (off / BITS_PER_UNIT);
      return true;
    }

  if (code == COMPONENT_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
	/* In accesses through union types consider the entire unions
	   rather than just their members.  */
	ostype = 0;
      tree field = TREE_OPERAND (ptr, 1);

      if (ostype == 0)
	{
	  /* In OSTYPE zero (for raw memory functions like memcpy), use
	     the maximum size instead if the identity of the enclosing
	     object cannot be determined.  */
	  if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	    return false;

	  /* Otherwise, use the size of the enclosing object and add
	     the offset of the member to the offset computed so far.  */
	  tree offset = byte_position (field);
	  if (TREE_CODE (offset) == INTEGER_CST)
	    pref->add_offset (wi::to_offset (offset));
	  else
	    pref->add_max_offset ();

	  if (!pref->ref)
	    /* REF may have been already set to an SSA_NAME earlier
	       to provide better context for diagnostics.  In that case,
	       leave it unchanged.  */
	    pref->ref = ref;
	  return true;
	}

      /* In stricter object size types the access is bounded by the
	 member itself.  */
      pref->ref = field;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
	{
	  /* Set maximum size if the reference is to the pointer member
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* SAM is set for array members that might need special treatment.  */
      special_array_member sam;
      tree size = component_ref_size (ptr, &sam);
      if (sam == special_array_member::int_0)
	pref->sizrng[0] = pref->sizrng[1] = 0;
      else if (!pref->trail1special && sam == special_array_member::trail_1)
	pref->sizrng[0] = pref->sizrng[1] = 1;
      else if (size && TREE_CODE (size) == INTEGER_CST)
	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
      else
	{
	  /* When the size of the member is unknown it's either a flexible
	     array member or a trailing special array member (either zero
	     length or one-element).  Set the size to the maximum minus
	     the constant size of the type.  */
	  pref->sizrng[0] = 0;
	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
	    if (TREE_CODE (recsize) == INTEGER_CST)
	      pref->sizrng[1] -= wi::to_offset (recsize);
	}
      return true;
    }

  if (code == ARRAY_REF)
    return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);

  if (code == MEM_REF)
    return handle_mem_ref (ptr, ostype, pref, snlim, qry);

  if (code == TARGET_MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
      pref->ref = ptr;
      pref->add_max_offset ();
      return true;
    }

  if (code == INTEGER_CST)
    {
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Set size to
	 zero.  For null pointers, set size to the maximum for now
	 since those may be the result of jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;
      pref->ref = ptr;

      return true;
    }

  if (code == STRING_CST)
    {
      /* The size of a string literal includes its terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;
    }

  if (code == POINTER_PLUS_EXPR)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* Clear DEREF since the offset is being applied to the target
	 of the dereference.  */
      pref->deref = 0;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (get_offset_range (off, NULL, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();
      return true;
    }

  if (code == VIEW_CONVERT_EXPR)
    {
      /* A view-convert doesn't change the underlying object.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, ostype, pref, snlim, qry);
    }

  if (code == SSA_NAME)
    {
      if (!snlim.next ())
	return false;

      /* Only process an SSA_NAME if the recursion limit has not yet
	 been reached.  */
      if (qry)
	{
	  if (++qry->depth)
	    qry->max_depth = qry->depth;
	  if (const access_ref *cache_ref = qry->get_ref (ptr))
	    {
	      /* If the pointer is in the cache set *PREF to what it refers
		 to and return success.  */
	      *pref = *cache_ref;
	      return true;
	    }
	}

      /* NOTE(review): the RVALS initialization above guards against
	 a null QRY but the qry->put_ref calls below do not — confirm
	 all callers pass a nonnull QRY (both public wrappers do).  */
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (is_gimple_call (stmt))
	{
	  /* If STMT is a call to an allocation function get the size
	     from its argument(s).  If successful, also set *PREF->REF
	     to PTR for the caller to include in diagnostics.  */
	  wide_int wr[2];
	  if (gimple_call_alloc_size (stmt, wr, rvals))
	    {
	      pref->ref = ptr;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      /* Constrain both bounds to a valid size.  */
	      offset_int maxsize = wi::to_offset (max_object_size ());
	      if (pref->sizrng[0] > maxsize)
		pref->sizrng[0] = maxsize;
	      if (pref->sizrng[1] > maxsize)
		pref->sizrng[1] = maxsize;
	    }
	  else
	    {
	      /* For functions known to return one of their pointer arguments
		 try to determine what the returned pointer points to, and on
		 success add OFFRNG which was set to the offset added by
		 the function (e.g., memchr) to the overall offset.  */
	      bool past_end;
	      offset_int offrng[2];
	      if (tree ret = gimple_call_return_array (stmt, offrng,
						       &past_end, rvals))
		{
		  if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
		    return false;

		  /* Cap OFFRNG[1] to at most the remaining size of
		     the object.  */
		  offset_int remrng[2];
		  remrng[1] = pref->size_remaining (remrng);
		  if (remrng[1] != 0 && !past_end)
		    /* Decrement the size for functions that never return
		       a past-the-end pointer.  */
		    remrng[1] -= 1;

		  if (remrng[1] < offrng[1])
		    offrng[1] = remrng[1];
		  pref->add_offset (offrng[0], offrng[1]);
		}
	      else
		{
		  /* For other calls that might return arbitrary pointers
		     including into the middle of objects set the size
		     range to maximum, clear PREF->BASE0, and also set
		     PREF->REF to include in diagnostics.  */
		  pref->set_max_size_range ();
		  pref->base0 = false;
		  pref->ref = ptr;
		}
	    }
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_nop_p (stmt))
	{
	  /* For a function argument try to determine the byte size
	     of the array from the current function declaratation
	     (e.g., attribute access or related).  */
	  wide_int wr[2];
	  bool static_array = false;
	  if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	    {
	      pref->parmarray = !static_array;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      pref->ref = ref;
	      qry->put_ref (ptr, *pref);
	      return true;
	    }

	  pref->set_max_size_range ();
	  pref->base0 = false;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Compute the union of the PHI arguments' results into
	     PHI_REF (access_ref::get_ref merges them), keeping PTR
	     as the reference for diagnostics.  */
	  pref->ref = ptr;
	  access_ref phi_ref = *pref;
	  if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	    return false;
	  *pref = phi_ref;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (!is_gimple_assign (stmt))
	{
	  /* Clear BASE0 since the assigned pointer might point into
	     the middle of the object, set the maximum size range and,
	     if the SSA_NAME refers to a function argumnent, set
	     PREF->REF to it.  */
	  pref->base0 = false;
	  pref->set_max_size_range ();
	  pref->ref = ptr;
	  return true;
	}

      tree_code code = gimple_assign_rhs_code (stmt);

      if (code == MAX_EXPR || code == MIN_EXPR)
	{
	  if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
	    return false;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      tree rhs = gimple_assign_rhs1 (stmt);

      if (code == ASSERT_EXPR)
	{
	  /* An ASSERT_EXPR is transparent: look through to its
	     first operand.  */
	  rhs = TREE_OPERAND (rhs, 0);
	  return compute_objsize_r (rhs, ostype, pref, snlim, qry);
	}

      if (code == POINTER_PLUS_EXPR
	  && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
	{
	  /* Compute the size of the object first.  */
	  if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
	    return false;

	  offset_int orng[2];
	  tree off = gimple_assign_rhs2 (stmt);
	  if (get_offset_range (off, stmt, orng, rvals))
	    pref->add_offset (orng[0], orng[1]);
	  else
	    pref->add_max_offset ();
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (code == ADDR_EXPR
	  || code == SSA_NAME)
	return compute_objsize_r (rhs, ostype, pref, snlim, qry);

      /* (This could also be an assignment from a nonlocal pointer.)  Save
	 PTR to mention in diagnostics but otherwise treat it as a pointer
	 to an unknown object.  */
      pref->ref = rhs;
      pref->base0 = false;
      pref->set_max_size_range ();
      return true;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
1832
1833/* A "public" wrapper around the above. Clients should use this overload
1834 instead. */
1835
1836tree
1837compute_objsize (tree ptr, int ostype, access_ref *pref,
1838 range_query *rvals /* = NULL */)
1839{
1840 pointer_query qry;
1841 qry.rvals = rvals;
1842 ssa_name_limit_t snlim;
1843 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
1844 return NULL_TREE;
1845
1846 offset_int maxsize = pref->size_remaining ();
1847 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
1848 pref->offrng[0] = 0;
1849 return wide_int_to_tree (sizetype, maxsize);
1850}
1851
1852/* Transitional wrapper. The function should be removed once callers
1853 transition to the pointer_query API. */
1854
1855tree
1856compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
1857{
1858 pointer_query qry;
1859 if (ptr_qry)
1860 ptr_qry->depth = 0;
1861 else
1862 ptr_qry = &qry;
1863
1864 ssa_name_limit_t snlim;
1865 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
1866 return NULL_TREE;
1867
1868 offset_int maxsize = pref->size_remaining ();
1869 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
1870 pref->offrng[0] = 0;
1871 return wide_int_to_tree (sizetype, maxsize);
1872}
1873
/* Legacy wrapper around the above.  The function should be removed
   once callers transition to one of the two above.  On success stores
   the referenced DECL in *PDECL and the offset into it in *POFF when
   the corresponding pointers are nonnull.  */

tree
compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
		 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
{
  /* Set the initial offsets to zero and size to negative to indicate
     none has been computed yet.  */
  access_ref ref;
  tree size = compute_objsize (ptr, ostype, &ref, rvals);
  /* Fail unless a size was determined and the reference is known to
     be based at the start of its object (BASE0 set).  */
  if (!size || !ref.base0)
    return NULL_TREE;

  if (pdecl)
    *pdecl = ref.ref;

  if (poff)
    /* When the lower offset bound is negative the boolean index
       selects the upper bound instead, i.e., return the larger of
       the two bounds whenever the lower one is negative.  */
    *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);

  return size;
}