]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/pointer-query.cc
Correct a function pre/postcondition [PR102403].
[thirdparty/gcc.git] / gcc / pointer-query.cc
1 /* Definitions of the pointer_query and related classes.
2
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
38 #include "intl.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
44
45 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
46 pointer_query *);
47
48 /* Wrapper around the wide_int overload of get_range that accepts
49 offset_int instead. For middle end expressions returns the same
50 result. For a subset of nonconstamt expressions emitted by the front
51 end determines a more precise range than would be possible otherwise. */
52
53 static bool
54 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
55 {
56 offset_int add = 0;
57 if (TREE_CODE (x) == PLUS_EXPR)
58 {
59 /* Handle constant offsets in pointer addition expressions seen
60 n the front end IL. */
61 tree op = TREE_OPERAND (x, 1);
62 if (TREE_CODE (op) == INTEGER_CST)
63 {
64 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
65 add = wi::to_offset (op);
66 x = TREE_OPERAND (x, 0);
67 }
68 }
69
70 if (TREE_CODE (x) == NOP_EXPR)
71 /* Also handle conversions to sizetype seen in the front end IL. */
72 x = TREE_OPERAND (x, 0);
73
74 tree type = TREE_TYPE (x);
75 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
76 return false;
77
78 if (TREE_CODE (x) != INTEGER_CST
79 && TREE_CODE (x) != SSA_NAME)
80 {
81 if (TYPE_UNSIGNED (type)
82 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
83 type = signed_type_for (type);
84
85 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
86 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
87 return x;
88 }
89
90 wide_int wr[2];
91 if (!get_range (x, stmt, wr, rvals))
92 return false;
93
94 signop sgn = SIGNED;
95 /* Only convert signed integers or unsigned sizetype to a signed
96 offset and avoid converting large positive values in narrower
97 types to negative offsets. */
98 if (TYPE_UNSIGNED (type)
99 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
100 sgn = UNSIGNED;
101
102 r[0] = offset_int::from (wr[0], sgn);
103 r[1] = offset_int::from (wr[1], sgn);
104 return true;
105 }
106
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  range_query *rvals)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")       // ordinary form
	  && !id_equal (fname, "_ZnwmPv")    // ordinary form
	  && !id_equal (fname, "_ZnajPv")    // array form
	  && !id_equal (fname, "_ZnamPv"))   // array form
	return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Placement new returns its second (pointer) argument, with
	 the result pointing at the beginning of the object.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize (src, 1, &aref, rvals)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	/* The result points at most just before the end of the searched
	   region, hence the -1 adjustment of the upper bound.  */
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, rvals))
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result may point anywhere into the searched string.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* The result points at the terminating nul, at most SRC's size
	   minus one bytes past the destination.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize (src, 1, &aref, rvals))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize (src, 1, &aref, rvals)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpncpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
279
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  /* Fall back on the function-wide range query when none is given.  */
  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      value_range vr;

      query->range_of_expr (vr, exp, stmt);

      /* An undefined range is treated as varying over the whole type.  */
      if (vr.undefined_p ())
	vr.set_varying (TREE_TYPE (exp));
      range_type = vr.kind ();
      min = wi::to_wide (vr.min ());
      max = wi::to_wide (vr.max ());
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      /* Non-integral expressions yield no usable range.  */
      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      /* An anti-range ~[MIN, MAX] describes two subranges; pick the one
	 that represents valid sizes per FLAGS.  */
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      /* Otherwise use the non-negative subrange below the
		 excluded interval: [0, MIN - 1].  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  /* Widen the unsigned bounds to the precision of the maximum
	     object size before comparing against it.  */
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
431
432 bool
433 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
434 {
435 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
436 }
437
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query * /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  /* Look up the attribute on the called declaration if known, otherwise
     on the type of the call expression (e.g., indirect calls).  */
  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* ARGIDX1/ARGIDX2 are the zero-based positions of the size and
     (optional) count arguments; UINT_MAX means "not set".  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      /* __builtin_alloca_with_align has no attribute but its first
	 argument is the allocated size.  */
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Extract the one or two argument positions from the attribute
	 (which encodes them 1-based).  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With a single constant size argument there is nothing to multiply;
     ARGIDX2 == UINT_MAX > NARGS when no second argument was given.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      /* Clamp the upper bound to SIZE_MAX; PREC is wide enough that the
	 product above cannot wrap.  */
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
541
/* For an access to an object referenced by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current
   function.  Set STATIC_ARRAY if the array parameter has been declared
   [static].  Return the function parameter on success and null
   otherwise.  */

tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  /* Without a minimum array bound there is no size to report.  */
  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
597
/* Construct an access_ref, optionally bounded by BOUND (the size
   argument of the access, e.g. the N of memcpy) and MINACCESS (set
   when at least one byte must be accessed).  */

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      /* NOTE(review): this clamps the lower bound to 0 or 1, discarding
	 the value extracted from RNG[0] above — confirm that only the
	 "at least one byte" property is intended to survive here.  */
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
624
625 /* Return the PHI node REF refers to or null if it doesn't. */
626
627 gphi *
628 access_ref::phi () const
629 {
630 if (!ref || TREE_CODE (ref) != SSA_NAME)
631 return NULL;
632
633 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
634 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
635 return NULL;
636
637 return as_a <gphi *> (def_stmt);
638 }
639
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
   argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  /* Guard against revisiting the same PHI (and bound the recursion);
     every successful visit must be paired with leave_phi below.  */
  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  access_ref phi_ref;
  if (pref)
    {
      /* The identity of the object has not been determined yet but
	 PREF->REF is set by the caller to the PHI for convenience.
	 The size is negative/invalid and the offset is zero (it's
	 updated only after the identity of the object has been
	 established).  */
      gcc_assert (pref->sizrng[0] < 0);
      gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);

      phi_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Cache the result for the argument to avoid recomputation.  */
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      parmarray |= phi_arg_ref.parmarray;

      /* NULLP is set for a null argument unless it's the only one.  */
      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  /* If PHI_REF doesn't contain a meaningful result yet set it
	     to the result for the first argument.  */
	  if (!nullp)
	    phi_ref = phi_arg_ref;

	  /* Set if the current argument refers to one or more objects of
	     known size (or range of sizes), as opposed to referring to
	     one or more unknown object(s).  */
	  const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				       || phi_arg_ref.sizrng[1] != maxobjsize);
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];

	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      /* Reset the PHI's BASE0 flag if any of the nonnull arguments
	 refers to an object at an unknown offset.  */
      if (!phi_arg_ref.base0)
	phi_ref.base0 = false;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;
    }

  /* Replace the lower bound of the largest argument with the size
     of the smallest argument, and set PARMARRAY if any argument
     was one.  */
  phi_ref.sizrng[0] = minsize;
  phi_ref.parmarray = parmarray;

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
787
788 /* Return the maximum amount of space remaining and if non-null, set
789 argument to the minimum. */
790
791 offset_int
792 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
793 {
794 offset_int minbuf;
795 if (!pmin)
796 pmin = &minbuf;
797
798 if (sizrng[0] < 0)
799 {
800 /* If the identity of the object hasn't been determined return
801 the maximum size range. */
802 *pmin = 0;
803 return wi::to_offset (max_object_size ());
804 }
805
806 /* add_offset() ensures the offset range isn't inverted. */
807 gcc_checking_assert (offrng[0] <= offrng[1]);
808
809 if (base0)
810 {
811 /* The offset into referenced object is zero-based (i.e., it's
812 not referenced by a pointer into middle of some unknown object). */
813 if (offrng[0] < 0 && offrng[1] < 0)
814 {
815 /* If the offset is negative the remaining size is zero. */
816 *pmin = 0;
817 return 0;
818 }
819
820 if (sizrng[1] <= offrng[0])
821 {
822 /* If the starting offset is greater than or equal to the upper
823 bound on the size of the object, the space remaining is zero.
824 As a special case, if it's equal, set *PMIN to -1 to let
825 the caller know the offset is valid and just past the end. */
826 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
827 return 0;
828 }
829
830 /* Otherwise return the size minus the lower bound of the offset. */
831 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
832
833 *pmin = sizrng[0] - or0;
834 return sizrng[1] - or0;
835 }
836
837 /* The offset to the referenced object isn't zero-based (i.e., it may
838 refer to a byte other than the first. The size of such an object
839 is constrained only by the size of the address space (the result
840 of max_object_size()). */
841 if (sizrng[1] <= offrng[0])
842 {
843 *pmin = 0;
844 return 0;
845 }
846
847 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
848
849 *pmin = sizrng[0] - or0;
850 return sizrng[1] - or0;
851 }
852
853 /* Return true if the offset and object size are in range for SIZE. */
854
855 bool
856 access_ref::offset_in_range (const offset_int &size) const
857 {
858 if (size_remaining () < size)
859 return false;
860
861 if (base0)
862 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
863
864 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
865 return offmax[0] > -maxoff && offmax[1] < maxoff;
866 }
867
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  /* A non-negative MAX in an inverted range means the offset
	     may be anywhere from zero up.  */
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Set the minimum and maximum computed so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
948
949 /* Issue one inform message describing each target of an access REF.
950 WRITE is set for a write access and clear for a read access. */
951
952 void
953 access_ref::inform_access (access_mode mode) const
954 {
955 const access_ref &aref = *this;
956 if (!aref.ref)
957 return;
958
959 if (aref.phi ())
960 {
961 /* Set MAXREF to refer to the largest object and fill ALL_REFS
962 with data for all objects referenced by the PHI arguments. */
963 access_ref maxref;
964 auto_vec<access_ref> all_refs;
965 if (!get_ref (&all_refs, &maxref))
966 return;
967
968 /* Except for MAXREF, the rest of the arguments' offsets need not
969 reflect one added to the PHI itself. Determine the latter from
970 MAXREF on which the result is based. */
971 const offset_int orng[] =
972 {
973 offrng[0] - maxref.offrng[0],
974 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
975 };
976
977 /* Add the final PHI's offset to that of each of the arguments
978 and recurse to issue an inform message for it. */
979 for (unsigned i = 0; i != all_refs.length (); ++i)
980 {
981 /* Skip any PHIs; those could lead to infinite recursion. */
982 if (all_refs[i].phi ())
983 continue;
984
985 all_refs[i].add_offset (orng[0], orng[1]);
986 all_refs[i].inform_access (mode);
987 }
988 return;
989 }
990
991 /* Convert offset range and avoid including a zero range since it
992 isn't necessarily meaningful. */
993 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
994 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
995 HOST_WIDE_INT minoff;
996 HOST_WIDE_INT maxoff = diff_max;
997 if (wi::fits_shwi_p (aref.offrng[0]))
998 minoff = aref.offrng[0].to_shwi ();
999 else
1000 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1001
1002 if (wi::fits_shwi_p (aref.offrng[1]))
1003 maxoff = aref.offrng[1].to_shwi ();
1004
1005 if (maxoff <= diff_min || maxoff >= diff_max)
1006 /* Avoid mentioning an upper bound that's equal to or in excess
1007 of the maximum of ptrdiff_t. */
1008 maxoff = minoff;
1009
1010 /* Convert size range and always include it since all sizes are
1011 meaningful. */
1012 unsigned long long minsize = 0, maxsize = 0;
1013 if (wi::fits_shwi_p (aref.sizrng[0])
1014 && wi::fits_shwi_p (aref.sizrng[1]))
1015 {
1016 minsize = aref.sizrng[0].to_shwi ();
1017 maxsize = aref.sizrng[1].to_shwi ();
1018 }
1019
1020 /* SIZRNG doesn't necessarily have the same range as the allocation
1021 size determined by gimple_call_alloc_size (). */
1022 char sizestr[80];
1023 if (minsize == maxsize)
1024 sprintf (sizestr, "%llu", minsize);
1025 else
1026 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1027
1028 char offstr[80];
1029 if (minoff == 0
1030 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1031 offstr[0] = '\0';
1032 else if (minoff == maxoff)
1033 sprintf (offstr, "%lli", (long long) minoff);
1034 else
1035 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1036
1037 location_t loc = UNKNOWN_LOCATION;
1038
1039 tree ref = this->ref;
1040 tree allocfn = NULL_TREE;
1041 if (TREE_CODE (ref) == SSA_NAME)
1042 {
1043 gimple *stmt = SSA_NAME_DEF_STMT (ref);
1044 if (!stmt)
1045 return;
1046
1047 if (is_gimple_call (stmt))
1048 {
1049 loc = gimple_location (stmt);
1050 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1051 {
1052 /* Strip the SSA_NAME suffix from the variable name and
1053 recreate an identifier with the VLA's original name. */
1054 ref = gimple_call_lhs (stmt);
1055 if (SSA_NAME_IDENTIFIER (ref))
1056 {
1057 ref = SSA_NAME_IDENTIFIER (ref);
1058 const char *id = IDENTIFIER_POINTER (ref);
1059 size_t len = strcspn (id, ".$");
1060 if (!len)
1061 len = strlen (id);
1062 ref = get_identifier_with_length (id, len);
1063 }
1064 }
1065 else
1066 {
1067 /* Except for VLAs, retrieve the allocation function. */
1068 allocfn = gimple_call_fndecl (stmt);
1069 if (!allocfn)
1070 allocfn = gimple_call_fn (stmt);
1071 if (TREE_CODE (allocfn) == SSA_NAME)
1072 {
1073 /* For an ALLOC_CALL via a function pointer make a small
1074 effort to determine the destination of the pointer. */
1075 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1076 if (gimple_assign_single_p (def))
1077 {
1078 tree rhs = gimple_assign_rhs1 (def);
1079 if (DECL_P (rhs))
1080 allocfn = rhs;
1081 else if (TREE_CODE (rhs) == COMPONENT_REF)
1082 allocfn = TREE_OPERAND (rhs, 1);
1083 }
1084 }
1085 }
1086 }
1087 else if (gimple_nop_p (stmt))
1088 /* Handle DECL_PARM below. */
1089 ref = SSA_NAME_VAR (ref);
1090 else if (is_gimple_assign (stmt)
1091 && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1092 || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1093 {
1094 /* MIN or MAX_EXPR here implies a reference to a known object
1095 and either an unknown or distinct one (the latter being
1096 the result of an invalid relational expression). Determine
1097 the identity of the former and point to it in the note.
1098 TODO: Consider merging with PHI handling. */
1099 access_ref arg_ref[2];
1100 tree arg = gimple_assign_rhs1 (stmt);
1101 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1102 arg = gimple_assign_rhs2 (stmt);
1103 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1104
1105 /* Use the argument that references a known object with more
1106 space remaining. */
1107 const bool idx
1108 = (!arg_ref[0].ref || !arg_ref[0].base0
1109 || (arg_ref[0].base0 && arg_ref[1].base0
1110 && (arg_ref[0].size_remaining ()
1111 < arg_ref[1].size_remaining ())));
1112
1113 arg_ref[idx].offrng[0] = offrng[0];
1114 arg_ref[idx].offrng[1] = offrng[1];
1115 arg_ref[idx].inform_access (mode);
1116 return;
1117 }
1118 }
1119
1120 if (DECL_P (ref))
1121 loc = DECL_SOURCE_LOCATION (ref);
1122 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1123 loc = EXPR_LOCATION (ref);
1124 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1125 && TREE_CODE (ref) != SSA_NAME)
1126 return;
1127
1128 if (mode == access_read_write || mode == access_write_only)
1129 {
1130 if (allocfn == NULL_TREE)
1131 {
1132 if (*offstr)
1133 inform (loc, "at offset %s into destination object %qE of size %s",
1134 offstr, ref, sizestr);
1135 else
1136 inform (loc, "destination object %qE of size %s", ref, sizestr);
1137 return;
1138 }
1139
1140 if (*offstr)
1141 inform (loc,
1142 "at offset %s into destination object of size %s "
1143 "allocated by %qE", offstr, sizestr, allocfn);
1144 else
1145 inform (loc, "destination object of size %s allocated by %qE",
1146 sizestr, allocfn);
1147 return;
1148 }
1149
1150 if (mode == access_read_only)
1151 {
1152 if (allocfn == NULL_TREE)
1153 {
1154 if (*offstr)
1155 inform (loc, "at offset %s into source object %qE of size %s",
1156 offstr, ref, sizestr);
1157 else
1158 inform (loc, "source object %qE of size %s", ref, sizestr);
1159
1160 return;
1161 }
1162
1163 if (*offstr)
1164 inform (loc,
1165 "at offset %s into source object of size %s allocated by %qE",
1166 offstr, sizestr, allocfn);
1167 else
1168 inform (loc, "source object of size %s allocated by %qE",
1169 sizestr, allocfn);
1170 return;
1171 }
1172
1173 if (allocfn == NULL_TREE)
1174 {
1175 if (*offstr)
1176 inform (loc, "at offset %s into object %qE of size %s",
1177 offstr, ref, sizestr);
1178 else
1179 inform (loc, "object %qE of size %s", ref, sizestr);
1180
1181 return;
1182 }
1183
1184 if (*offstr)
1185 inform (loc,
1186 "at offset %s into object of size %s allocated by %qE",
1187 offstr, sizestr, allocfn);
1188 else
1189 inform (loc, "object of size %s allocated by %qE",
1190 sizestr, allocfn);
1191 }
1192
1193 /* Set a bit for the PHI in VISITED and return true if it wasn't
1194 already set. */
1195
1196 bool
1197 ssa_name_limit_t::visit_phi (tree ssa_name)
1198 {
1199 if (!visited)
1200 visited = BITMAP_ALLOC (NULL);
1201
1202 /* Return false if SSA_NAME has already been visited. */
1203 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1204 }
1205
/* Clear a bit for the PHI in VISITED, allowing the PHI to be visited
   again along a different use-def chain.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* VISITED must have been allocated by a prior call to visit_phi
     for this SSA_NAME; clear the bit set there.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1214
1215 /* Return false if the SSA_NAME chain length counter has reached
1216 the limit, otherwise increment the counter and return true. */
1217
1218 bool
1219 ssa_name_limit_t::next ()
1220 {
1221 /* Return a negative value to let caller avoid recursing beyond
1222 the specified limit. */
1223 if (ssa_def_max == 0)
1224 return false;
1225
1226 --ssa_def_max;
1227 return true;
1228 }
1229
1230 /* If the SSA_NAME has already been "seen" return a positive value.
1231 Otherwise add it to VISITED. If the SSA_NAME limit has been
1232 reached, return a negative value. Otherwise return zero. */
1233
1234 int
1235 ssa_name_limit_t::next_phi (tree ssa_name)
1236 {
1237 {
1238 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1239 /* Return a positive value if the PHI has already been visited. */
1240 if (gimple_code (def_stmt) == GIMPLE_PHI
1241 && !visit_phi (ssa_name))
1242 return 1;
1243 }
1244
1245 /* Return a negative value to let caller avoid recursing beyond
1246 the specified limit. */
1247 if (ssa_def_max == 0)
1248 return -1;
1249
1250 --ssa_def_max;
1251
1252 return 0;
1253 }
1254
/* Dtor.  Release the bitmap of visited PHIs if one was allocated
   by visit_phi.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
1260
1261 /* Default ctor. Initialize object with pointers to the range_query
1262 and cache_type instances to use or null. */
1263
1264 pointer_query::pointer_query (range_query *qry /* = NULL */,
1265 cache_type *cache /* = NULL */)
1266 : rvals (qry), var_cache (cache), hits (), misses (),
1267 failures (), depth (), max_depth ()
1268 {
1269 /* No op. */
1270 }
1271
1272 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1273 PTR if it's there or null otherwise. */
1274
1275 const access_ref *
1276 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1277 {
1278 if (!var_cache)
1279 {
1280 ++misses;
1281 return NULL;
1282 }
1283
1284 unsigned version = SSA_NAME_VERSION (ptr);
1285 unsigned idx = version << 1 | (ostype & 1);
1286 if (var_cache->indices.length () <= idx)
1287 {
1288 ++misses;
1289 return NULL;
1290 }
1291
1292 unsigned cache_idx = var_cache->indices[idx];
1293 if (var_cache->access_refs.length () <= cache_idx)
1294 {
1295 ++misses;
1296 return NULL;
1297 }
1298
1299 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1300 if (cache_ref.ref)
1301 {
1302 ++hits;
1303 return &cache_ref;
1304 }
1305
1306 ++misses;
1307 return NULL;
1308 }
1309
1310 /* Retrieve the access_ref instance for a variable from the cache if it's
1311 there or compute it and insert it into the cache if it's nonnonull. */
1312
1313 bool
1314 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
1315 {
1316 const unsigned version
1317 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1318
1319 if (var_cache && version)
1320 {
1321 unsigned idx = version << 1 | (ostype & 1);
1322 if (idx < var_cache->indices.length ())
1323 {
1324 unsigned cache_idx = var_cache->indices[idx] - 1;
1325 if (cache_idx < var_cache->access_refs.length ()
1326 && var_cache->access_refs[cache_idx].ref)
1327 {
1328 ++hits;
1329 *pref = var_cache->access_refs[cache_idx];
1330 return true;
1331 }
1332 }
1333
1334 ++misses;
1335 }
1336
1337 if (!compute_objsize (ptr, ostype, pref, this))
1338 {
1339 ++failures;
1340 return false;
1341 }
1342
1343 return true;
1344 }
1345
1346 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1347 nonnull. */
1348
1349 void
1350 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1351 {
1352 /* Only add populated/valid entries. */
1353 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1354 return;
1355
1356 /* Add REF to the two-level cache. */
1357 unsigned version = SSA_NAME_VERSION (ptr);
1358 unsigned idx = version << 1 | (ostype & 1);
1359
1360 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1361 Its value minus one is the index into ACCESS_REFS. Not all
1362 entries are valid. */
1363 if (var_cache->indices.length () <= idx)
1364 var_cache->indices.safe_grow_cleared (idx + 1);
1365
1366 if (!var_cache->indices[idx])
1367 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1368
1369 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1370 REF member is nonnull. All entries except for the last two
1371 are valid. Once nonnull, the REF value must stay unchanged. */
1372 unsigned cache_idx = var_cache->indices[idx];
1373 if (var_cache->access_refs.length () <= cache_idx)
1374 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1375
1376 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1377 if (cache_ref.ref)
1378 {
1379 gcc_checking_assert (cache_ref.ref == ref.ref);
1380 return;
1381 }
1382
1383 cache_ref = ref;
1384 }
1385
1386 /* Flush the cache if it's nonnull. */
1387
1388 void
1389 pointer_query::flush_cache ()
1390 {
1391 if (!var_cache)
1392 return;
1393 var_cache->indices.release ();
1394 var_cache->access_refs.release ();
1395 }
1396
1397 /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1398
1399 void
1400 pointer_query::dump (FILE *dump_file, bool contents /* = false */)
1401 {
1402 unsigned nused = 0, nrefs = 0;
1403 unsigned nidxs = var_cache->indices.length ();
1404 for (unsigned i = 0; i != nidxs; ++i)
1405 {
1406 unsigned ari = var_cache->indices[i];
1407 if (!ari)
1408 continue;
1409
1410 ++nused;
1411
1412 const access_ref &aref = var_cache->access_refs[ari];
1413 if (!aref.ref)
1414 continue;
1415
1416 ++nrefs;
1417 }
1418
1419 fprintf (dump_file, "pointer_query counters:\n"
1420 " index cache size: %u\n"
1421 " index entries: %u\n"
1422 " access cache size: %u\n"
1423 " access entries: %u\n"
1424 " hits: %u\n"
1425 " misses: %u\n"
1426 " failures: %u\n"
1427 " max_depth: %u\n",
1428 nidxs, nused,
1429 var_cache->access_refs.length (), nrefs,
1430 hits, misses, failures, max_depth);
1431
1432 if (!contents || !nidxs)
1433 return;
1434
1435 fputs ("\npointer_query cache contents:\n", dump_file);
1436
1437 for (unsigned i = 0; i != nidxs; ++i)
1438 {
1439 unsigned ari = var_cache->indices[i];
1440 if (!ari)
1441 continue;
1442
1443 const access_ref &aref = var_cache->access_refs[ari];
1444 if (!aref.ref)
1445 continue;
1446
1447 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1448 shifted left by one and ORed with the Object Size Type in
1449 the lowest bit. Print the two separately. */
1450 unsigned ver = i >> 1;
1451 unsigned ost = i & 1;
1452
1453 fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
1454 if (tree name = ssa_name (ver))
1455 {
1456 print_generic_expr (dump_file, name);
1457 fputs (" = ", dump_file);
1458 }
1459 else
1460 fprintf (dump_file, " _%u = ", ver);
1461
1462 if (gphi *phi = aref.phi ())
1463 {
1464 fputs ("PHI <", dump_file);
1465 unsigned nargs = gimple_phi_num_args (phi);
1466 for (unsigned i = 0; i != nargs; ++i)
1467 {
1468 tree arg = gimple_phi_arg_def (phi, i);
1469 print_generic_expr (dump_file, arg);
1470 if (i + 1 < nargs)
1471 fputs (", ", dump_file);
1472 }
1473 fputc ('>', dump_file);
1474 }
1475 else
1476 print_generic_expr (dump_file, aref.ref);
1477
1478 if (aref.offrng[0] != aref.offrng[1])
1479 fprintf (dump_file, " + [%lli, %lli]",
1480 (long long) aref.offrng[0].to_shwi (),
1481 (long long) aref.offrng[1].to_shwi ());
1482 else if (aref.offrng[0] != 0)
1483 fprintf (dump_file, " %c %lli",
1484 aref.offrng[0] < 0 ? '-' : '+',
1485 (long long) aref.offrng[0].to_shwi ());
1486
1487 fputc ('\n', dump_file);
1488 }
1489
1490 fputc ('\n', dump_file);
1491 }
1492
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   set PREF->REF to the operand with more or less space remaining,
   respectively, if both refer to the same (sub)object, or to PTR if they
   might not, and return true.  Otherwise, if the identity of neither
   operand can be determined, return false.  */

static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  const gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, ostype, &aref[0], snlim, qry))
    {
      /* Treat the unknown operand as an unbounded reference into an
	 object of unknown identity and maximum size.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining, whichever
	     is appropriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];

	  if (aref[i0].ref != aref[i1].ref)
	    /* If the operands don't refer to the same (sub)object set
	       PREF->REF to the SSA_NAME from which STMT was obtained
	       so that both can be identified in a diagnostic.  */
	    pref->ref = ptr;

	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PTR->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
1585
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Updates
   PREF's offset range (and in OSTYPE nonzero, its size range) in place.
   Return true on success and false on failure.  */

static bool
handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
		  ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  ++pref->deref;

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the size of (and offset into) the accessed object
     first.  */
  if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte
     offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain
	 (normally zero but 1 in Fortran).  */
      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* With a variable-size element type the byte offset cannot be
	 computed; fall back on the maximum offset.  */
      pref->add_max_offset ();
      return true;
    }

  /* Scale the index range by the (constant) element size.  */
  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1665
1666 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1667 MREF. Return true on success and false on failure. */
1668
1669 static bool
1670 handle_mem_ref (tree mref, int ostype, access_ref *pref,
1671 ssa_name_limit_t &snlim, pointer_query *qry)
1672 {
1673 gcc_assert (TREE_CODE (mref) == MEM_REF);
1674
1675 ++pref->deref;
1676
1677 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
1678 {
1679 /* Hack: Handle MEM_REFs of vector types as those to complete
1680 objects; those may be synthesized from multiple assignments
1681 to consecutive data members (see PR 93200 and 96963).
1682 FIXME: Vectorized assignments should only be present after
1683 vectorization so this hack is only necessary after it has
1684 run and could be avoided in calls from prior passes (e.g.,
1685 tree-ssa-strlen.c).
1686 FIXME: Deal with this more generally, e.g., by marking up
1687 such MEM_REFs at the time they're created. */
1688 ostype = 0;
1689 }
1690
1691 tree mrefop = TREE_OPERAND (mref, 0);
1692 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
1693 return false;
1694
1695 offset_int orng[2];
1696 tree off = pref->eval (TREE_OPERAND (mref, 1));
1697 range_query *const rvals = qry ? qry->rvals : NULL;
1698 if (!get_offset_range (off, NULL, orng, rvals))
1699 {
1700 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1701 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1702 orng[0] = -orng[1] - 1;
1703 }
1704
1705 pref->add_offset (orng[0], orng[1]);
1706 return true;
1707 }
1708
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   NOTE(review): QRY is checked for null before accessing QRY->RVALS
   but QRY->PUT_REF is called unconditionally in the SSA_NAME cases
   below, so callers are effectively required to pass a nonnull QRY
   (both public wrappers do) -- confirm before passing null.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, int ostype, access_ref *pref,
		   ssa_name_limit_t &snlim, pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* For the address of an object the offset is applied to the object
     itself rather than to the pointer, so undo one level of implied
     dereferencing.  */
  const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
  if (addr)
    {
      --pref->deref;
      ptr = TREE_OPERAND (ptr, 0);
    }

  if (DECL_P (ptr))
    {
      pref->ref = ptr;

      /* Reset the offset in case it was set by a prior call and not
	 cleared by the caller.  The offset is only adjusted after
	 the identity of the object has been determined.  */
      pref->offrng[0] = pref->offrng[1] = 0;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Set the maximum size if the reference is to the pointer
	     itself (as opposed to what it points to), and clear
	     BASE0 since the offset isn't necessarily zero-based.  */
	  pref->set_max_size_range ();
	  pref->base0 = false;
	  return true;
	}

      /* Valid offsets into the object are nonnegative.  */
      pref->base0 = true;

      /* Use the object's initializer size when it's a constant.  */
      if (tree size = decl_init_size (ptr, false))
	if (TREE_CODE (size) == INTEGER_CST)
	  {
	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
	    return true;
	  }

      pref->set_max_size_range ();
      return true;
    }

  const tree_code code = TREE_CODE (ptr);
  range_query *const rvals = qry ? qry->rvals : NULL;

  if (code == BIT_FIELD_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* Operand 2 is the bit position of the field; convert it to
	 a byte offset into the underlying object.  */
      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
      pref->add_offset (off / BITS_PER_UNIT);
      return true;
    }

  if (code == COMPONENT_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
	/* In accesses through union types consider the entire unions
	   rather than just their members.  */
	ostype = 0;
      tree field = TREE_OPERAND (ptr, 1);

      if (ostype == 0)
	{
	  /* In OSTYPE zero (for raw memory functions like memcpy), use
	     the maximum size instead if the identity of the enclosing
	     object cannot be determined.  */
	  if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	    return false;

	  /* Otherwise, use the size of the enclosing object and add
	     the offset of the member to the offset computed so far.  */
	  tree offset = byte_position (field);
	  if (TREE_CODE (offset) == INTEGER_CST)
	    pref->add_offset (wi::to_offset (offset));
	  else
	    pref->add_max_offset ();

	  if (!pref->ref)
	    /* REF may have been already set to an SSA_NAME earlier
	       to provide better context for diagnostics.  In that case,
	       leave it unchanged.  */
	    pref->ref = ref;
	  return true;
	}

      pref->ref = field;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
	{
	  /* Set maximum size if the reference is to the pointer member
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* SAM is set for array members that might need special treatment.  */
      special_array_member sam;
      tree size = component_ref_size (ptr, &sam);
      if (sam == special_array_member::int_0)
	pref->sizrng[0] = pref->sizrng[1] = 0;
      else if (!pref->trail1special && sam == special_array_member::trail_1)
	pref->sizrng[0] = pref->sizrng[1] = 1;
      else if (size && TREE_CODE (size) == INTEGER_CST)
	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
      else
	{
	  /* When the size of the member is unknown it's either a flexible
	     array member or a trailing special array member (either zero
	     length or one-element).  Set the size to the maximum minus
	     the constant size of the type.  */
	  pref->sizrng[0] = 0;
	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
	    if (TREE_CODE (recsize) == INTEGER_CST)
	      pref->sizrng[1] -= wi::to_offset (recsize);
	}
      return true;
    }

  if (code == ARRAY_REF)
    return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);

  if (code == MEM_REF)
    return handle_mem_ref (ptr, ostype, pref, snlim, qry);

  if (code == TARGET_MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
      pref->ref = ptr;
      pref->add_max_offset ();
      return true;
    }

  if (code == INTEGER_CST)
    {
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Set size to
	 zero.  For null pointers, set size to the maximum for now
	 since those may be the result of jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;
      pref->ref = ptr;

      return true;
    }

  if (code == STRING_CST)
    {
      /* TREE_STRING_LENGTH is the size in bytes including any
	 terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;
    }

  if (code == POINTER_PLUS_EXPR)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
	return false;

      /* Clear DEREF since the offset is being applied to the target
	 of the dereference.  */
      pref->deref = 0;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (get_offset_range (off, NULL, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();
      return true;
    }

  if (code == VIEW_CONVERT_EXPR)
    {
      /* Look through the conversion; it doesn't change the size.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, ostype, pref, snlim, qry);
    }

  if (code == SSA_NAME)
    {
      if (!snlim.next ())
	return false;

      /* Only process an SSA_NAME if the recursion limit has not yet
	 been reached.  */
      if (qry)
	{
	  /* NOTE(review): this records the current depth rather than
	     comparing against MAX_DEPTH; since DEPTH is never
	     decremented in this function the effect is the same, but
	     the condition reads as if it were meant to be
	     ++qry->depth > qry->max_depth -- confirm.  */
	  if (++qry->depth)
	    qry->max_depth = qry->depth;
	  if (const access_ref *cache_ref = qry->get_ref (ptr))
	    {
	      /* If the pointer is in the cache set *PREF to what it refers
		 to and return success.
		 FIXME: BNDRNG is determined by each access and so it doesn't
		 belong in access_ref.  Until the design is changed, keep it
		 unchanged here.  */
	      const offset_int bndrng[2] = { pref->bndrng[0], pref->bndrng[1] };
	      *pref = *cache_ref;
	      pref->bndrng[0] = bndrng[0];
	      pref->bndrng[1] = bndrng[1];
	      return true;
	    }
	}

      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (is_gimple_call (stmt))
	{
	  /* If STMT is a call to an allocation function get the size
	     from its argument(s).  If successful, also set *PREF->REF
	     to PTR for the caller to include in diagnostics.  */
	  wide_int wr[2];
	  if (gimple_call_alloc_size (stmt, wr, rvals))
	    {
	      pref->ref = ptr;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      /* Constrain both bounds to a valid size.  */
	      offset_int maxsize = wi::to_offset (max_object_size ());
	      if (pref->sizrng[0] > maxsize)
		pref->sizrng[0] = maxsize;
	      if (pref->sizrng[1] > maxsize)
		pref->sizrng[1] = maxsize;
	    }
	  else
	    {
	      /* For functions known to return one of their pointer arguments
		 try to determine what the returned pointer points to, and on
		 success add OFFRNG which was set to the offset added by
		 the function (e.g., memchr) to the overall offset.  */
	      bool past_end;
	      offset_int offrng[2];
	      if (tree ret = gimple_call_return_array (stmt, offrng,
						       &past_end, rvals))
		{
		  if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
		    return false;

		  /* Cap OFFRNG[1] to at most the remaining size of
		     the object.  */
		  offset_int remrng[2];
		  remrng[1] = pref->size_remaining (remrng);
		  if (remrng[1] != 0 && !past_end)
		    /* Decrement the size for functions that never return
		       a past-the-end pointer.  */
		    remrng[1] -= 1;

		  if (remrng[1] < offrng[1])
		    offrng[1] = remrng[1];
		  pref->add_offset (offrng[0], offrng[1]);
		}
	      else
		{
		  /* For other calls that might return arbitrary pointers
		     including into the middle of objects set the size
		     range to maximum, clear PREF->BASE0, and also set
		     PREF->REF to include in diagnostics.  */
		  pref->set_max_size_range ();
		  pref->base0 = false;
		  pref->ref = ptr;
		}
	    }
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_nop_p (stmt))
	{
	  /* For a function argument try to determine the byte size
	     of the array from the current function declaration
	     (e.g., attribute access or related).  */
	  wide_int wr[2];
	  bool static_array = false;
	  if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	    {
	      pref->parmarray = !static_array;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      pref->ref = ref;
	      qry->put_ref (ptr, *pref);
	      return true;
	    }

	  pref->set_max_size_range ();
	  pref->base0 = false;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Merge the objects referenced by all the PHI arguments;
	     access_ref::get_ref does the recursion over them.  */
	  pref->ref = ptr;
	  access_ref phi_ref = *pref;
	  if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	    return false;
	  *pref = phi_ref;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (!is_gimple_assign (stmt))
	{
	  /* Clear BASE0 since the assigned pointer might point into
	     the middle of the object, set the maximum size range and,
	     if the SSA_NAME refers to a function argument, set
	     PREF->REF to it.  */
	  pref->base0 = false;
	  pref->set_max_size_range ();
	  pref->ref = ptr;
	  return true;
	}

      tree_code code = gimple_assign_rhs_code (stmt);

      if (code == MAX_EXPR || code == MIN_EXPR)
	{
	  if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
	    return false;

	  qry->put_ref (ptr, *pref);
	  return true;
	}

      tree rhs = gimple_assign_rhs1 (stmt);

      if (code == ASSERT_EXPR)
	{
	  /* Look through the ASSERT_EXPR to the asserted variable.  */
	  rhs = TREE_OPERAND (rhs, 0);
	  return compute_objsize_r (rhs, ostype, pref, snlim, qry);
	}

      if (code == POINTER_PLUS_EXPR
	  && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
	{
	  /* Compute the size of the object first.  */
	  if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
	    return false;

	  offset_int orng[2];
	  tree off = gimple_assign_rhs2 (stmt);
	  if (get_offset_range (off, stmt, orng, rvals))
	    pref->add_offset (orng[0], orng[1]);
	  else
	    pref->add_max_offset ();

	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (code == ADDR_EXPR || code == SSA_NAME)
	{
	  /* A copy: simply follow the right-hand side.  */
	  if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
	    return false;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      /* (This could also be an assignment from a nonlocal pointer.)  Save
	 PTR to mention in diagnostics but otherwise treat it as a pointer
	 to an unknown object.  */
      pref->ref = rhs;
      pref->base0 = false;
      pref->set_max_size_range ();
      return true;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
2114
2115 /* A "public" wrapper around the above. Clients should use this overload
2116 instead. */
2117
2118 tree
2119 compute_objsize (tree ptr, int ostype, access_ref *pref,
2120 range_query *rvals /* = NULL */)
2121 {
2122 pointer_query qry;
2123 qry.rvals = rvals;
2124
2125 /* Clear and invalidate in case *PREF is being reused. */
2126 pref->offrng[0] = pref->offrng[1] = 0;
2127 pref->sizrng[0] = pref->sizrng[1] = -1;
2128
2129 ssa_name_limit_t snlim;
2130 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
2131 return NULL_TREE;
2132
2133 offset_int maxsize = pref->size_remaining ();
2134 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2135 pref->offrng[0] = 0;
2136 return wide_int_to_tree (sizetype, maxsize);
2137 }
2138
2139 /* Transitional wrapper. The function should be removed once callers
2140 transition to the pointer_query API. */
2141
2142 tree
2143 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
2144 {
2145 pointer_query qry;
2146 if (ptr_qry)
2147 ptr_qry->depth = 0;
2148 else
2149 ptr_qry = &qry;
2150
2151 /* Clear and invalidate in case *PREF is being reused. */
2152 pref->offrng[0] = pref->offrng[1] = 0;
2153 pref->sizrng[0] = pref->sizrng[1] = -1;
2154
2155 ssa_name_limit_t snlim;
2156 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
2157 return NULL_TREE;
2158
2159 offset_int maxsize = pref->size_remaining ();
2160 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2161 pref->offrng[0] = 0;
2162 return wide_int_to_tree (sizetype, maxsize);
2163 }
2164
2165 /* Legacy wrapper around the above. The function should be removed
2166 once callers transition to one of the two above. */
2167
2168 tree
2169 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
2170 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2171 {
2172 /* Set the initial offsets to zero and size to negative to indicate
2173 none has been computed yet. */
2174 access_ref ref;
2175 tree size = compute_objsize (ptr, ostype, &ref, rvals);
2176 if (!size || !ref.base0)
2177 return NULL_TREE;
2178
2179 if (pdecl)
2180 *pdecl = ref.ref;
2181
2182 if (poff)
2183 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2184
2185 return size;
2186 }