]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/pointer-query.cc
Move bndrng from access_ref to access_data.
[thirdparty/gcc.git] / gcc / pointer-query.cc
CommitLineData
2a837de2
MS
1/* Definitions of the pointer_query and related classes.
2
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
2a837de2 25#include "tree.h"
2a837de2 26#include "gimple.h"
2a837de2
MS
27#include "stringpool.h"
28#include "tree-vrp.h"
2a837de2 29#include "diagnostic-core.h"
2a837de2 30#include "fold-const.h"
2a837de2
MS
31#include "tree-object-size.h"
32#include "tree-ssa-strlen.h"
2a837de2 33#include "langhooks.h"
2a837de2
MS
34#include "stringpool.h"
35#include "attribs.h"
2a837de2 36#include "gimple-fold.h"
ece28da9 37#include "gimple-ssa.h"
2a837de2 38#include "intl.h"
2a837de2
MS
39#include "attr-fnspec.h"
40#include "gimple-range.h"
2a837de2 41#include "pointer-query.h"
ece28da9
MS
42#include "tree-pretty-print.h"
43#include "tree-ssanames.h"
54fa5567 44#include "target.h"
2a837de2 45
9a27acc3
MS
46static bool compute_objsize_r (tree, gimple *, int, access_ref *,
47 ssa_name_limit_t &, pointer_query *);
2a837de2
MS
48
49/* Wrapper around the wide_int overload of get_range that accepts
50 offset_int instead. For middle end expressions returns the same
51 result. For a subset of nonconstamt expressions emitted by the front
52 end determines a more precise range than would be possible otherwise. */
53
54static bool
55get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
56{
57 offset_int add = 0;
58 if (TREE_CODE (x) == PLUS_EXPR)
59 {
60 /* Handle constant offsets in pointer addition expressions seen
61 n the front end IL. */
62 tree op = TREE_OPERAND (x, 1);
63 if (TREE_CODE (op) == INTEGER_CST)
64 {
65 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
66 add = wi::to_offset (op);
67 x = TREE_OPERAND (x, 0);
68 }
69 }
70
71 if (TREE_CODE (x) == NOP_EXPR)
72 /* Also handle conversions to sizetype seen in the front end IL. */
73 x = TREE_OPERAND (x, 0);
74
75 tree type = TREE_TYPE (x);
76 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
77 return false;
78
79 if (TREE_CODE (x) != INTEGER_CST
80 && TREE_CODE (x) != SSA_NAME)
81 {
82 if (TYPE_UNSIGNED (type)
83 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
84 type = signed_type_for (type);
85
86 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
87 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
88 return x;
89 }
90
91 wide_int wr[2];
92 if (!get_range (x, stmt, wr, rvals))
93 return false;
94
95 signop sgn = SIGNED;
96 /* Only convert signed integers or unsigned sizetype to a signed
97 offset and avoid converting large positive values in narrower
98 types to negative offsets. */
99 if (TYPE_UNSIGNED (type)
100 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
101 sgn = UNSIGNED;
102
103 r[0] = offset_int::from (wr[0], sgn);
104 r[1] = offset_int::from (wr[1], sgn);
105 return true;
106}
107
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  SNLIM bounds the recursion through SSA names and
   QRY caches pointer query results.  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  ssa_name_limit_t &snlim, pointer_query *qry)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")          // ordinary form
	  && !id_equal (fname, "_ZnwmPv")       // ordinary form
	  && !id_equal (fname, "_ZnajPv")       // array form
	  && !id_equal (fname, "_ZnamPv"))      // array form
	return NULL_TREE;

      /* Placement new takes exactly (size, pointer).  */
      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Allocation functions return a pointer to the beginning.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	/* memchr returns a pointer strictly within the first argument,
	   so the upper offset bound is one less than the size bound.  */
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, qry->rvals))
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result points somewhere into the first argument; the upper
	 bound of the offset is unknown.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* stpcpy returns a pointer to the terminating nul, which is at
	   most one before the end of the source object.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, qry->rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpcpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
280
b48d4e68
MS
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.
   QUERY, when nonnull, supplies range information; otherwise the global
   range query for the current function is used.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      value_range vr;

      query->range_of_expr (vr, exp, stmt);

      /* An undefined range is treated as varying below.  */
      if (vr.undefined_p ())
	vr.set_varying (TREE_TYPE (exp));
      range_type = vr.kind ();
      min = wi::to_wide (vr.min ());
      max = wi::to_wide (vr.max ());
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      /* Otherwise use the lower subrange [0, MIN - 1].  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      /* NOTE(review): this declaration shadows the MAXSIZE
		 declared just above with the same value — presumably
		 harmless; confirm and consider removing.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
432
433bool
434get_size_range (tree exp, tree range[2], int flags /* = 0 */)
435{
436 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
437}
438
2a837de2
MS
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query *qry /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  /* Prefer the declared type of the callee; fall back on the type of
     the call for indirect calls.  */
  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* ARGIDX1/ARGIDX2 are the zero-based positions of the alloc_size
     argument(s); UINT_MAX means "not specified".  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      /* __builtin_alloca_with_align's size is its first argument.  */
      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Extract the one or two attribute operands (1-based in the
	 attribute, converted to 0-based argument indices here).  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With a single, constant size argument the result is that constant.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
542
/* For an access to an object referenced by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current
   function.  Set STATIC_ARRAY if the array parameter has been declared
   [static].  Return the function parameter on success and null
   otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  /* No constant array bound was specified for the parameter.  */
  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
598
f9379fcb
MS
/* Initialize the object.  The reference is null, the offset range is
   the valid [0, 0], and the size range is invalidated to [-1, -1] until
   the identity of the referenced object has been determined.  */

access_ref::access_ref ()
  : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
    base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;
}
611
612/* Return the PHI node REF refers to or null if it doesn't. */
613
614gphi *
615access_ref::phi () const
616{
617 if (!ref || TREE_CODE (ref) != SSA_NAME)
618 return NULL;
619
620 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
ece28da9 621 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
2a837de2
MS
622 return NULL;
623
624 return as_a <gphi *> (def_stmt);
625}
626
820f0940
MS
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
   argument ARG.  ALL_REFS, when nonnull, collects the result for every
   argument; PSNLIM bounds the recursion and QRY caches results.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    /* Not a PHI: the reference itself is the answer.  */
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  access_ref phi_ref;
  if (pref)
    {
      /* The identity of the object has not been determined yet but
	 PREF->REF is set by the caller to the PHI for convenience.
	 The size is negative/invalid and the offset is zero (it's
	 updated only after the identity of the object has been
	 established).  */
      gcc_assert (pref->sizrng[0] < 0);
      gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);

      phi_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, phi_stmt, ostype, &phi_arg_ref, *psnlim,
			      qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Cache the result for the argument to avoid recomputing it.  */
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      parmarray |= phi_arg_ref.parmarray;

      /* Null arguments are ignored unless the PHI has just one.  */
      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  /* If PHI_REF doesn't contain a meaningful result yet set it
	     to the result for the first argument.  */
	  if (!nullp)
	    phi_ref = phi_arg_ref;

	  /* Set if the current argument refers to one or more objects of
	     known size (or range of sizes), as opposed to referring to
	     one or more unknown object(s).  */
	  const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				       || phi_arg_ref.sizrng[1] != maxobjsize);
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];

	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      /* Reset the PHI's BASE0 flag if any of the nonnull arguments
	 refers to an object at an unknown offset.  */
      if (!phi_arg_ref.base0)
	phi_ref.base0 = false;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;
    }

  /* Replace the lower bound of the largest argument with the size
     of the smallest argument, and set PARMARRAY if any argument
     was one.  */
  phi_ref.sizrng[0] = minsize;
  phi_ref.parmarray = parmarray;

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
775
/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  A minimum of -1 is a sentinel meaning the
   offset is valid but points just past the end of the object.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  if (sizrng[0] < 0)
    {
      /* If the identity of the object hasn't been determined return
	 the maximum size range.  */
      *pmin = 0;
      return wi::to_offset (max_object_size ());
    }

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into referenced object is zero-based (i.e., it's
	 not referenced by a pointer into middle of some unknown object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first.  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  /* Clamp a negative lower offset bound at zero.  */
  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
840
841/* Return true if the offset and object size are in range for SIZE. */
842
843bool
844access_ref::offset_in_range (const offset_int &size) const
845{
846 if (size_remaining () < size)
847 return false;
848
849 if (base0)
850 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
851
852 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
853 return offmax[0] > -maxoff && offmax[1] < maxoff;
854}
855
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to an known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Set the minimum and maximum computed so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
936
937/* Issue one inform message describing each target of an access REF.
938 WRITE is set for a write access and clear for a read access. */
939
940void
941access_ref::inform_access (access_mode mode) const
942{
943 const access_ref &aref = *this;
944 if (!aref.ref)
945 return;
946
947 if (aref.phi ())
948 {
949 /* Set MAXREF to refer to the largest object and fill ALL_REFS
950 with data for all objects referenced by the PHI arguments. */
951 access_ref maxref;
952 auto_vec<access_ref> all_refs;
953 if (!get_ref (&all_refs, &maxref))
954 return;
955
956 /* Except for MAXREF, the rest of the arguments' offsets need not
957 reflect one added to the PHI itself. Determine the latter from
958 MAXREF on which the result is based. */
959 const offset_int orng[] =
960 {
961 offrng[0] - maxref.offrng[0],
962 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
963 };
964
965 /* Add the final PHI's offset to that of each of the arguments
966 and recurse to issue an inform message for it. */
967 for (unsigned i = 0; i != all_refs.length (); ++i)
968 {
969 /* Skip any PHIs; those could lead to infinite recursion. */
970 if (all_refs[i].phi ())
971 continue;
972
973 all_refs[i].add_offset (orng[0], orng[1]);
974 all_refs[i].inform_access (mode);
975 }
976 return;
977 }
978
979 /* Convert offset range and avoid including a zero range since it
980 isn't necessarily meaningful. */
981 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
982 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
983 HOST_WIDE_INT minoff;
984 HOST_WIDE_INT maxoff = diff_max;
985 if (wi::fits_shwi_p (aref.offrng[0]))
986 minoff = aref.offrng[0].to_shwi ();
987 else
988 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
989
990 if (wi::fits_shwi_p (aref.offrng[1]))
991 maxoff = aref.offrng[1].to_shwi ();
992
993 if (maxoff <= diff_min || maxoff >= diff_max)
994 /* Avoid mentioning an upper bound that's equal to or in excess
995 of the maximum of ptrdiff_t. */
996 maxoff = minoff;
997
998 /* Convert size range and always include it since all sizes are
999 meaningful. */
1000 unsigned long long minsize = 0, maxsize = 0;
1001 if (wi::fits_shwi_p (aref.sizrng[0])
1002 && wi::fits_shwi_p (aref.sizrng[1]))
1003 {
1004 minsize = aref.sizrng[0].to_shwi ();
1005 maxsize = aref.sizrng[1].to_shwi ();
1006 }
1007
1008 /* SIZRNG doesn't necessarily have the same range as the allocation
1009 size determined by gimple_call_alloc_size (). */
1010 char sizestr[80];
1011 if (minsize == maxsize)
1012 sprintf (sizestr, "%llu", minsize);
1013 else
1014 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1015
1016 char offstr[80];
1017 if (minoff == 0
1018 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1019 offstr[0] = '\0';
1020 else if (minoff == maxoff)
1021 sprintf (offstr, "%lli", (long long) minoff);
1022 else
1023 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1024
1025 location_t loc = UNKNOWN_LOCATION;
1026
1027 tree ref = this->ref;
1028 tree allocfn = NULL_TREE;
1029 if (TREE_CODE (ref) == SSA_NAME)
1030 {
1031 gimple *stmt = SSA_NAME_DEF_STMT (ref);
ece28da9
MS
1032 if (!stmt)
1033 return;
1034
2a837de2
MS
1035 if (is_gimple_call (stmt))
1036 {
1037 loc = gimple_location (stmt);
1038 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1039 {
1040 /* Strip the SSA_NAME suffix from the variable name and
1041 recreate an identifier with the VLA's original name. */
1042 ref = gimple_call_lhs (stmt);
1043 if (SSA_NAME_IDENTIFIER (ref))
1044 {
1045 ref = SSA_NAME_IDENTIFIER (ref);
1046 const char *id = IDENTIFIER_POINTER (ref);
1047 size_t len = strcspn (id, ".$");
1048 if (!len)
1049 len = strlen (id);
1050 ref = get_identifier_with_length (id, len);
1051 }
1052 }
1053 else
1054 {
1055 /* Except for VLAs, retrieve the allocation function. */
1056 allocfn = gimple_call_fndecl (stmt);
1057 if (!allocfn)
1058 allocfn = gimple_call_fn (stmt);
1059 if (TREE_CODE (allocfn) == SSA_NAME)
1060 {
1061 /* For an ALLOC_CALL via a function pointer make a small
1062 effort to determine the destination of the pointer. */
1063 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1064 if (gimple_assign_single_p (def))
1065 {
1066 tree rhs = gimple_assign_rhs1 (def);
1067 if (DECL_P (rhs))
1068 allocfn = rhs;
1069 else if (TREE_CODE (rhs) == COMPONENT_REF)
1070 allocfn = TREE_OPERAND (rhs, 1);
1071 }
1072 }
1073 }
1074 }
1075 else if (gimple_nop_p (stmt))
1076 /* Handle DECL_PARM below. */
1077 ref = SSA_NAME_VAR (ref);
31e924c5
MS
1078 else if (is_gimple_assign (stmt)
1079 && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1080 || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1081 {
1082 /* MIN or MAX_EXPR here implies a reference to a known object
1083 and either an unknown or distinct one (the latter being
1084 the result of an invalid relational expression). Determine
1085 the identity of the former and point to it in the note.
1086 TODO: Consider merging with PHI handling. */
1087 access_ref arg_ref[2];
1088 tree arg = gimple_assign_rhs1 (stmt);
1089 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1090 arg = gimple_assign_rhs2 (stmt);
1091 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1092
1093 /* Use the argument that references a known object with more
1094 space remaining. */
1095 const bool idx
1096 = (!arg_ref[0].ref || !arg_ref[0].base0
1097 || (arg_ref[0].base0 && arg_ref[1].base0
1098 && (arg_ref[0].size_remaining ()
1099 < arg_ref[1].size_remaining ())));
1100
1101 arg_ref[idx].offrng[0] = offrng[0];
1102 arg_ref[idx].offrng[1] = offrng[1];
1103 arg_ref[idx].inform_access (mode);
1104 return;
1105 }
2a837de2
MS
1106 }
1107
1108 if (DECL_P (ref))
1109 loc = DECL_SOURCE_LOCATION (ref);
1110 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1111 loc = EXPR_LOCATION (ref);
1112 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1113 && TREE_CODE (ref) != SSA_NAME)
1114 return;
1115
1116 if (mode == access_read_write || mode == access_write_only)
1117 {
1118 if (allocfn == NULL_TREE)
1119 {
1120 if (*offstr)
1121 inform (loc, "at offset %s into destination object %qE of size %s",
1122 offstr, ref, sizestr);
1123 else
1124 inform (loc, "destination object %qE of size %s", ref, sizestr);
1125 return;
1126 }
1127
1128 if (*offstr)
1129 inform (loc,
1130 "at offset %s into destination object of size %s "
1131 "allocated by %qE", offstr, sizestr, allocfn);
1132 else
1133 inform (loc, "destination object of size %s allocated by %qE",
1134 sizestr, allocfn);
1135 return;
1136 }
1137
1138 if (mode == access_read_only)
1139 {
1140 if (allocfn == NULL_TREE)
1141 {
1142 if (*offstr)
1143 inform (loc, "at offset %s into source object %qE of size %s",
1144 offstr, ref, sizestr);
1145 else
1146 inform (loc, "source object %qE of size %s", ref, sizestr);
1147
1148 return;
1149 }
1150
1151 if (*offstr)
1152 inform (loc,
1153 "at offset %s into source object of size %s allocated by %qE",
1154 offstr, sizestr, allocfn);
1155 else
1156 inform (loc, "source object of size %s allocated by %qE",
1157 sizestr, allocfn);
1158 return;
1159 }
1160
1161 if (allocfn == NULL_TREE)
1162 {
1163 if (*offstr)
1164 inform (loc, "at offset %s into object %qE of size %s",
1165 offstr, ref, sizestr);
1166 else
1167 inform (loc, "object %qE of size %s", ref, sizestr);
1168
1169 return;
1170 }
1171
1172 if (*offstr)
1173 inform (loc,
1174 "at offset %s into object of size %s allocated by %qE",
1175 offstr, sizestr, allocfn);
1176 else
1177 inform (loc, "object of size %s allocated by %qE",
1178 sizestr, allocfn);
1179}
1180
f9379fcb
MS
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  */
access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (stmt), call (), dst (), src (), mode (mode)
{
  /* Compute the bounds on the number of bytes written and read from
     the optional MAXWRITE/MAXREAD expressions; a null bound yields
     the maximal range (see set_bound).  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1193
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  */
access_data::access_data (range_query *query, tree expr, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (), call (expr), dst (), src (), mode (mode)
{
  /* For an access described by an expression rather than a statement
     the STMT member is null here; set_bound accepts a null statement.  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1206
1207/* Set BNDRNG to the range of BOUND for the statement STMT. */
1208
1209void
1210access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
1211 range_query *query, gimple *stmt)
1212{
1213 /* Set the default bounds of the access and adjust below. */
1214 bndrng[0] = minaccess ? 1 : 0;
1215 bndrng[1] = HOST_WIDE_INT_M1U;
1216
1217 /* When BOUND is nonnull and a range can be extracted from it,
1218 set the bounds of the access to reflect both it and MINACCESS.
1219 BNDRNG[0] is the size of the minimum access. */
1220 tree rng[2];
1221 if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
1222 {
1223 bndrng[0] = wi::to_offset (rng[0]);
1224 bndrng[1] = wi::to_offset (rng[1]);
1225 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
1226 }
1227}
1228
2a837de2
MS
1229/* Set a bit for the PHI in VISITED and return true if it wasn't
1230 already set. */
1231
1232bool
1233ssa_name_limit_t::visit_phi (tree ssa_name)
1234{
1235 if (!visited)
1236 visited = BITMAP_ALLOC (NULL);
1237
1238 /* Return false if SSA_NAME has already been visited. */
1239 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1240}
1241
/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Remove SSA_NAME from the set of visited PHIs so that it can be
     considered again along a different use-def path.  VISITED is
     presumably nonnull here because visit_phi is called first --
     TODO confirm with callers.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1250
1251/* Return false if the SSA_NAME chain length counter has reached
1252 the limit, otherwise increment the counter and return true. */
1253
1254bool
1255ssa_name_limit_t::next ()
1256{
1257 /* Return a negative value to let caller avoid recursing beyond
1258 the specified limit. */
1259 if (ssa_def_max == 0)
1260 return false;
1261
1262 --ssa_def_max;
1263 return true;
1264}
1265
1266/* If the SSA_NAME has already been "seen" return a positive value.
1267 Otherwise add it to VISITED. If the SSA_NAME limit has been
1268 reached, return a negative value. Otherwise return zero. */
1269
1270int
1271ssa_name_limit_t::next_phi (tree ssa_name)
1272{
1273 {
1274 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1275 /* Return a positive value if the PHI has already been visited. */
1276 if (gimple_code (def_stmt) == GIMPLE_PHI
1277 && !visit_phi (ssa_name))
1278 return 1;
1279 }
1280
1281 /* Return a negative value to let caller avoid recursing beyond
1282 the specified limit. */
1283 if (ssa_def_max == 0)
1284 return -1;
1285
1286 --ssa_def_max;
1287
1288 return 0;
1289}
1290
/* Release the bitmap of visited PHIs, if one was ever allocated.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
1296
/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  All statistics counters start at zero.  */
}
1307
1308/* Return a pointer to the cached access_ref instance for the SSA_NAME
1309 PTR if it's there or null otherwise. */
1310
1311const access_ref *
1312pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1313{
1314 if (!var_cache)
1315 {
1316 ++misses;
1317 return NULL;
1318 }
1319
1320 unsigned version = SSA_NAME_VERSION (ptr);
1321 unsigned idx = version << 1 | (ostype & 1);
1322 if (var_cache->indices.length () <= idx)
1323 {
1324 ++misses;
1325 return NULL;
1326 }
1327
1328 unsigned cache_idx = var_cache->indices[idx];
1329 if (var_cache->access_refs.length () <= cache_idx)
1330 {
1331 ++misses;
1332 return NULL;
1333 }
1334
1335 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1336 if (cache_ref.ref)
1337 {
1338 ++hits;
1339 return &cache_ref;
1340 }
1341
1342 ++misses;
1343 return NULL;
1344}
1345
1346/* Retrieve the access_ref instance for a variable from the cache if it's
1347 there or compute it and insert it into the cache if it's nonnonull. */
1348
1349bool
9a27acc3 1350pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref, int ostype /* = 1 */)
2a837de2
MS
1351{
1352 const unsigned version
1353 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1354
1355 if (var_cache && version)
1356 {
1357 unsigned idx = version << 1 | (ostype & 1);
1358 if (idx < var_cache->indices.length ())
1359 {
1360 unsigned cache_idx = var_cache->indices[idx] - 1;
1361 if (cache_idx < var_cache->access_refs.length ()
1362 && var_cache->access_refs[cache_idx].ref)
1363 {
1364 ++hits;
1365 *pref = var_cache->access_refs[cache_idx];
1366 return true;
1367 }
1368 }
1369
1370 ++misses;
1371 }
1372
9a27acc3 1373 if (!compute_objsize (ptr, stmt, ostype, pref, this))
2a837de2
MS
1374 {
1375 ++failures;
1376 return false;
1377 }
1378
1379 return true;
1380}
1381
1382/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1383 nonnull. */
1384
1385void
1386pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1387{
1388 /* Only add populated/valid entries. */
1389 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1390 return;
1391
1392 /* Add REF to the two-level cache. */
1393 unsigned version = SSA_NAME_VERSION (ptr);
1394 unsigned idx = version << 1 | (ostype & 1);
1395
1396 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1397 Its value minus one is the index into ACCESS_REFS. Not all
1398 entries are valid. */
1399 if (var_cache->indices.length () <= idx)
1400 var_cache->indices.safe_grow_cleared (idx + 1);
1401
1402 if (!var_cache->indices[idx])
1403 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1404
1405 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1406 REF member is nonnull. All entries except for the last two
1407 are valid. Once nonnull, the REF value must stay unchanged. */
1408 unsigned cache_idx = var_cache->indices[idx];
1409 if (var_cache->access_refs.length () <= cache_idx)
1410 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1411
ece28da9 1412 access_ref &cache_ref = var_cache->access_refs[cache_idx];
2a837de2
MS
1413 if (cache_ref.ref)
1414 {
1415 gcc_checking_assert (cache_ref.ref == ref.ref);
1416 return;
1417 }
1418
1419 cache_ref = ref;
1420}
1421
1422/* Flush the cache if it's nonnull. */
1423
1424void
1425pointer_query::flush_cache ()
1426{
1427 if (!var_cache)
1428 return;
1429 var_cache->indices.release ();
1430 var_cache->access_refs.release ();
1431}
1432
ece28da9
MS
1433/* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1434
1435void
1436pointer_query::dump (FILE *dump_file, bool contents /* = false */)
1437{
1438 unsigned nused = 0, nrefs = 0;
1439 unsigned nidxs = var_cache->indices.length ();
1440 for (unsigned i = 0; i != nidxs; ++i)
1441 {
1442 unsigned ari = var_cache->indices[i];
1443 if (!ari)
1444 continue;
1445
1446 ++nused;
1447
1448 const access_ref &aref = var_cache->access_refs[ari];
1449 if (!aref.ref)
1450 continue;
1451
1452 ++nrefs;
1453 }
1454
1455 fprintf (dump_file, "pointer_query counters:\n"
1456 " index cache size: %u\n"
1457 " index entries: %u\n"
1458 " access cache size: %u\n"
1459 " access entries: %u\n"
1460 " hits: %u\n"
1461 " misses: %u\n"
1462 " failures: %u\n"
1463 " max_depth: %u\n",
1464 nidxs, nused,
1465 var_cache->access_refs.length (), nrefs,
1466 hits, misses, failures, max_depth);
1467
1468 if (!contents || !nidxs)
1469 return;
1470
1471 fputs ("\npointer_query cache contents:\n", dump_file);
1472
1473 for (unsigned i = 0; i != nidxs; ++i)
1474 {
1475 unsigned ari = var_cache->indices[i];
1476 if (!ari)
1477 continue;
1478
1479 const access_ref &aref = var_cache->access_refs[ari];
1480 if (!aref.ref)
1481 continue;
1482
1483 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1484 shifted left by one and ORed with the Object Size Type in
1485 the lowest bit. Print the two separately. */
1486 unsigned ver = i >> 1;
1487 unsigned ost = i & 1;
1488
1489 fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
1490 if (tree name = ssa_name (ver))
1491 {
1492 print_generic_expr (dump_file, name);
1493 fputs (" = ", dump_file);
1494 }
1495 else
1496 fprintf (dump_file, " _%u = ", ver);
1497
1498 if (gphi *phi = aref.phi ())
1499 {
1500 fputs ("PHI <", dump_file);
1501 unsigned nargs = gimple_phi_num_args (phi);
1502 for (unsigned i = 0; i != nargs; ++i)
1503 {
1504 tree arg = gimple_phi_arg_def (phi, i);
1505 print_generic_expr (dump_file, arg);
1506 if (i + 1 < nargs)
1507 fputs (", ", dump_file);
1508 }
1509 fputc ('>', dump_file);
1510 }
1511 else
1512 print_generic_expr (dump_file, aref.ref);
1513
1514 if (aref.offrng[0] != aref.offrng[1])
1515 fprintf (dump_file, " + [%lli, %lli]",
1516 (long long) aref.offrng[0].to_shwi (),
1517 (long long) aref.offrng[1].to_shwi ());
1518 else if (aref.offrng[0] != 0)
1519 fprintf (dump_file, " %c %lli",
1520 aref.offrng[0] < 0 ? '-' : '+',
1521 (long long) aref.offrng[0].to_shwi ());
1522
1523 fputc ('\n', dump_file);
1524 }
1525
1526 fputc ('\n', dump_file);
1527}
1528
2a837de2 1529/* A helper of compute_objsize_r() to determine the size from an assignment
31e924c5
MS
1530 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1531 set PREF->REF to the operand with more or less space remaining,
1532 respectively, if both refer to the same (sub)object, or to PTR if they
1533 might not, and return true. Otherwise, if the identity of neither
1534 operand can be determined, return false. */
2a837de2
MS
1535
1536static bool
31e924c5 1537handle_min_max_size (tree ptr, int ostype, access_ref *pref,
2a837de2
MS
1538 ssa_name_limit_t &snlim, pointer_query *qry)
1539{
9a27acc3 1540 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
31e924c5 1541 const tree_code code = gimple_assign_rhs_code (stmt);
2a837de2
MS
1542
1543 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1544 Determine the size/offset of each and use the one with more or less
1545 space remaining, respectively. If either fails, use the information
1546 determined from the other instead, adjusted up or down as appropriate
1547 for the expression. */
1548 access_ref aref[2] = { *pref, *pref };
31e924c5 1549 tree arg1 = gimple_assign_rhs1 (stmt);
9a27acc3 1550 if (!compute_objsize_r (arg1, stmt, ostype, &aref[0], snlim, qry))
2a837de2
MS
1551 {
1552 aref[0].base0 = false;
1553 aref[0].offrng[0] = aref[0].offrng[1] = 0;
1554 aref[0].add_max_offset ();
1555 aref[0].set_max_size_range ();
1556 }
1557
31e924c5 1558 tree arg2 = gimple_assign_rhs2 (stmt);
9a27acc3 1559 if (!compute_objsize_r (arg2, stmt, ostype, &aref[1], snlim, qry))
2a837de2
MS
1560 {
1561 aref[1].base0 = false;
1562 aref[1].offrng[0] = aref[1].offrng[1] = 0;
1563 aref[1].add_max_offset ();
1564 aref[1].set_max_size_range ();
1565 }
1566
1567 if (!aref[0].ref && !aref[1].ref)
1568 /* Fail if the identity of neither argument could be determined. */
1569 return false;
1570
1571 bool i0 = false;
1572 if (aref[0].ref && aref[0].base0)
1573 {
1574 if (aref[1].ref && aref[1].base0)
1575 {
1576 /* If the object referenced by both arguments has been determined
1577 set *PREF to the one with more or less space remainng, whichever
1578 is appopriate for CODE.
1579 TODO: Indicate when the objects are distinct so it can be
1580 diagnosed. */
1581 i0 = code == MAX_EXPR;
1582 const bool i1 = !i0;
1583
1584 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1585 *pref = aref[i1];
1586 else
1587 *pref = aref[i0];
31e924c5
MS
1588
1589 if (aref[i0].ref != aref[i1].ref)
1590 /* If the operands don't refer to the same (sub)object set
1591 PREF->REF to the SSA_NAME from which STMT was obtained
1592 so that both can be identified in a diagnostic. */
1593 pref->ref = ptr;
1594
2a837de2
MS
1595 return true;
1596 }
1597
1598 /* If only the object referenced by one of the arguments could be
1599 determined, use it and... */
1600 *pref = aref[0];
1601 i0 = true;
1602 }
1603 else
1604 *pref = aref[1];
1605
1606 const bool i1 = !i0;
1607 /* ...see if the offset obtained from the other pointer can be used
1608 to tighten up the bound on the offset obtained from the first. */
1609 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1610 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1611 {
1612 pref->offrng[0] = aref[i0].offrng[0];
1613 pref->offrng[1] = aref[i0].offrng[1];
1614 }
31e924c5
MS
1615
1616 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1617 might not refer to the same (sub)object. */
1618 pref->ref = ptr;
2a837de2
MS
1619 return true;
1620}
1621
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
   on success and false on failure.  */

static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
		  access_ref *pref, ssa_name_limit_t &snlim,
		  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  /* Account for the dereference implied by the subscript.  */
  ++pref->deref;

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the object the array belongs to first.  */
  if (!compute_objsize_r (arefop, stmt, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte
     offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain
	 (normally zero but 1 in Fortran).  */
      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* Element size unknown (e.g., a VLA element type): give up on
	 the offset but keep the size determined above.  */
      pref->add_max_offset ();
      return true;
    }

  /* Scale the index range by the constant element size.  */
  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	 pref->offrng[0] + orng[0] + sz,
	 pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten the size bounds; never grow them.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1702
1703/* A helper of compute_objsize_r() to determine the size from MEM_REF
1704 MREF. Return true on success and false on failure. */
1705
1706static bool
9a27acc3 1707handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
2a837de2
MS
1708 ssa_name_limit_t &snlim, pointer_query *qry)
1709{
1710 gcc_assert (TREE_CODE (mref) == MEM_REF);
1711
1712 ++pref->deref;
1713
1714 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
1715 {
1716 /* Hack: Handle MEM_REFs of vector types as those to complete
1717 objects; those may be synthesized from multiple assignments
1718 to consecutive data members (see PR 93200 and 96963).
1719 FIXME: Vectorized assignments should only be present after
1720 vectorization so this hack is only necessary after it has
1721 run and could be avoided in calls from prior passes (e.g.,
1722 tree-ssa-strlen.c).
1723 FIXME: Deal with this more generally, e.g., by marking up
1724 such MEM_REFs at the time they're created. */
1725 ostype = 0;
1726 }
1727
1728 tree mrefop = TREE_OPERAND (mref, 0);
9a27acc3 1729 if (!compute_objsize_r (mrefop, stmt, ostype, pref, snlim, qry))
2a837de2
MS
1730 return false;
1731
1732 offset_int orng[2];
1733 tree off = pref->eval (TREE_OPERAND (mref, 1));
1734 range_query *const rvals = qry ? qry->rvals : NULL;
1735 if (!get_offset_range (off, NULL, orng, rvals))
1736 {
1737 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1738 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1739 orng[0] = -orng[1] - 1;
1740 }
1741
1742 pref->add_offset (orng[0], orng[1]);
1743 return true;
1744}
1745
1746/* Helper to compute the size of the object referenced by the PTR
1747 expression which must have pointer type, using Object Size type
1748 OSTYPE (only the least significant 2 bits are used).
1749 On success, sets PREF->REF to the DECL of the referenced object
1750 if it's unique, otherwise to null, PREF->OFFRNG to the range of
1751 offsets into it, and PREF->SIZRNG to the range of sizes of
1752 the object(s).
1753 SNLIM is used to avoid visiting the same PHI operand multiple
1754 times, and, when nonnull, RVALS to determine range information.
1755 Returns true on success, false when a meaningful size (or range)
1756 cannot be determined.
1757
1758 The function is intended for diagnostics and should not be used
1759 to influence code generation or optimization. */
1760
1761static bool
9a27acc3 1762compute_objsize_r (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2a837de2
MS
1763 ssa_name_limit_t &snlim, pointer_query *qry)
1764{
1765 STRIP_NOPS (ptr);
1766
1767 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
1768 if (addr)
1769 {
1770 --pref->deref;
1771 ptr = TREE_OPERAND (ptr, 0);
1772 }
1773
1774 if (DECL_P (ptr))
1775 {
1776 pref->ref = ptr;
1777
820f0940
MS
1778 /* Reset the offset in case it was set by a prior call and not
1779 cleared by the caller. The offset is only adjusted after
1780 the identity of the object has been determined. */
1781 pref->offrng[0] = pref->offrng[1] = 0;
1782
2a837de2
MS
1783 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
1784 {
1785 /* Set the maximum size if the reference is to the pointer
1786 itself (as opposed to what it points to), and clear
1787 BASE0 since the offset isn't necessarily zero-based. */
1788 pref->set_max_size_range ();
1789 pref->base0 = false;
1790 return true;
1791 }
1792
820f0940
MS
1793 /* Valid offsets into the object are nonnegative. */
1794 pref->base0 = true;
1795
2a837de2
MS
1796 if (tree size = decl_init_size (ptr, false))
1797 if (TREE_CODE (size) == INTEGER_CST)
1798 {
1799 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1800 return true;
1801 }
1802
1803 pref->set_max_size_range ();
1804 return true;
1805 }
1806
1807 const tree_code code = TREE_CODE (ptr);
1808 range_query *const rvals = qry ? qry->rvals : NULL;
1809
1810 if (code == BIT_FIELD_REF)
1811 {
1812 tree ref = TREE_OPERAND (ptr, 0);
9a27acc3 1813 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
2a837de2
MS
1814 return false;
1815
1816 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
1817 pref->add_offset (off / BITS_PER_UNIT);
1818 return true;
1819 }
1820
1821 if (code == COMPONENT_REF)
1822 {
1823 tree ref = TREE_OPERAND (ptr, 0);
1824 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
1825 /* In accesses through union types consider the entire unions
1826 rather than just their members. */
1827 ostype = 0;
1828 tree field = TREE_OPERAND (ptr, 1);
1829
1830 if (ostype == 0)
1831 {
1832 /* In OSTYPE zero (for raw memory functions like memcpy), use
1833 the maximum size instead if the identity of the enclosing
1834 object cannot be determined. */
9a27acc3 1835 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
2a837de2
MS
1836 return false;
1837
1838 /* Otherwise, use the size of the enclosing object and add
1839 the offset of the member to the offset computed so far. */
1840 tree offset = byte_position (field);
1841 if (TREE_CODE (offset) == INTEGER_CST)
1842 pref->add_offset (wi::to_offset (offset));
1843 else
1844 pref->add_max_offset ();
1845
1846 if (!pref->ref)
1847 /* REF may have been already set to an SSA_NAME earlier
1848 to provide better context for diagnostics. In that case,
1849 leave it unchanged. */
1850 pref->ref = ref;
1851 return true;
1852 }
1853
1854 pref->ref = field;
1855
1856 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1857 {
1858 /* Set maximum size if the reference is to the pointer member
1859 itself (as opposed to what it points to). */
1860 pref->set_max_size_range ();
1861 return true;
1862 }
1863
1864 /* SAM is set for array members that might need special treatment. */
1865 special_array_member sam;
1866 tree size = component_ref_size (ptr, &sam);
1867 if (sam == special_array_member::int_0)
1868 pref->sizrng[0] = pref->sizrng[1] = 0;
1869 else if (!pref->trail1special && sam == special_array_member::trail_1)
1870 pref->sizrng[0] = pref->sizrng[1] = 1;
1871 else if (size && TREE_CODE (size) == INTEGER_CST)
1872 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1873 else
1874 {
1875 /* When the size of the member is unknown it's either a flexible
1876 array member or a trailing special array member (either zero
1877 length or one-element). Set the size to the maximum minus
1878 the constant size of the type. */
1879 pref->sizrng[0] = 0;
1880 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1881 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
1882 if (TREE_CODE (recsize) == INTEGER_CST)
1883 pref->sizrng[1] -= wi::to_offset (recsize);
1884 }
1885 return true;
1886 }
1887
1888 if (code == ARRAY_REF)
9a27acc3 1889 return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);
2a837de2
MS
1890
1891 if (code == MEM_REF)
9a27acc3 1892 return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);
2a837de2
MS
1893
1894 if (code == TARGET_MEM_REF)
1895 {
1896 tree ref = TREE_OPERAND (ptr, 0);
9a27acc3 1897 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
2a837de2
MS
1898 return false;
1899
1900 /* TODO: Handle remaining operands. Until then, add maximum offset. */
1901 pref->ref = ptr;
1902 pref->add_max_offset ();
1903 return true;
1904 }
1905
1906 if (code == INTEGER_CST)
1907 {
1908 /* Pointer constants other than null are most likely the result
54fa5567
MS
1909 of erroneous null pointer addition/subtraction. Unless zero
1910 is a valid address set size to zero. For null pointers, set
1911 size to the maximum for now since those may be the result of
1912 jump threading. */
2a837de2
MS
1913 if (integer_zerop (ptr))
1914 pref->set_max_size_range ();
54fa5567
MS
1915 else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
1916 {
1917 tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
1918 addr_space_t as = TYPE_ADDR_SPACE (deref_type);
1919 if (targetm.addr_space.zero_address_valid (as))
1920 pref->set_max_size_range ();
1921 else
1922 pref->sizrng[0] = pref->sizrng[1] = 0;
1923 }
2a837de2
MS
1924 else
1925 pref->sizrng[0] = pref->sizrng[1] = 0;
54fa5567 1926
2a837de2
MS
1927 pref->ref = ptr;
1928
1929 return true;
1930 }
1931
1932 if (code == STRING_CST)
1933 {
1934 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
1935 pref->ref = ptr;
1936 return true;
1937 }
1938
1939 if (code == POINTER_PLUS_EXPR)
1940 {
1941 tree ref = TREE_OPERAND (ptr, 0);
9a27acc3 1942 if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
2a837de2
MS
1943 return false;
1944
1945 /* Clear DEREF since the offset is being applied to the target
1946 of the dereference. */
1947 pref->deref = 0;
1948
1949 offset_int orng[2];
1950 tree off = pref->eval (TREE_OPERAND (ptr, 1));
1951 if (get_offset_range (off, NULL, orng, rvals))
1952 pref->add_offset (orng[0], orng[1]);
1953 else
1954 pref->add_max_offset ();
1955 return true;
1956 }
1957
1958 if (code == VIEW_CONVERT_EXPR)
1959 {
1960 ptr = TREE_OPERAND (ptr, 0);
9a27acc3 1961 return compute_objsize_r (ptr, stmt, ostype, pref, snlim, qry);
2a837de2
MS
1962 }
1963
1964 if (code == SSA_NAME)
1965 {
1966 if (!snlim.next ())
1967 return false;
1968
1969 /* Only process an SSA_NAME if the recursion limit has not yet
1970 been reached. */
1971 if (qry)
1972 {
1973 if (++qry->depth)
1974 qry->max_depth = qry->depth;
1975 if (const access_ref *cache_ref = qry->get_ref (ptr))
1976 {
1977 /* If the pointer is in the cache set *PREF to what it refers
f9379fcb 1978 to and return success. */
2a837de2
MS
1979 *pref = *cache_ref;
1980 return true;
1981 }
1982 }
1983
9a27acc3 1984 stmt = SSA_NAME_DEF_STMT (ptr);
2a837de2
MS
1985 if (is_gimple_call (stmt))
1986 {
1987 /* If STMT is a call to an allocation function get the size
1988 from its argument(s). If successful, also set *PREF->REF
1989 to PTR for the caller to include in diagnostics. */
1990 wide_int wr[2];
1991 if (gimple_call_alloc_size (stmt, wr, rvals))
1992 {
1993 pref->ref = ptr;
1994 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
1995 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
1996 /* Constrain both bounds to a valid size. */
1997 offset_int maxsize = wi::to_offset (max_object_size ());
1998 if (pref->sizrng[0] > maxsize)
1999 pref->sizrng[0] = maxsize;
2000 if (pref->sizrng[1] > maxsize)
2001 pref->sizrng[1] = maxsize;
2002 }
2003 else
2004 {
2005 /* For functions known to return one of their pointer arguments
2006 try to determine what the returned pointer points to, and on
2007 success add OFFRNG which was set to the offset added by
2008 the function (e.g., memchr) to the overall offset. */
2009 bool past_end;
2010 offset_int offrng[2];
2011 if (tree ret = gimple_call_return_array (stmt, offrng,
9a27acc3 2012 &past_end, snlim, qry))
2a837de2 2013 {
9a27acc3 2014 if (!compute_objsize_r (ret, stmt, ostype, pref, snlim, qry))
2a837de2
MS
2015 return false;
2016
2017 /* Cap OFFRNG[1] to at most the remaining size of
2018 the object. */
2019 offset_int remrng[2];
2020 remrng[1] = pref->size_remaining (remrng);
2021 if (remrng[1] != 0 && !past_end)
2022 /* Decrement the size for functions that never return
2023 a past-the-end pointer. */
2024 remrng[1] -= 1;
2025
2026 if (remrng[1] < offrng[1])
2027 offrng[1] = remrng[1];
2028 pref->add_offset (offrng[0], offrng[1]);
2029 }
2030 else
2031 {
2032 /* For other calls that might return arbitrary pointers
2033 including into the middle of objects set the size
2034 range to maximum, clear PREF->BASE0, and also set
2035 PREF->REF to include in diagnostics. */
2036 pref->set_max_size_range ();
2037 pref->base0 = false;
2038 pref->ref = ptr;
2039 }
2040 }
2041 qry->put_ref (ptr, *pref);
2042 return true;
2043 }
2044
2045 if (gimple_nop_p (stmt))
2046 {
2047 /* For a function argument try to determine the byte size
2048 of the array from the current function declaratation
2049 (e.g., attribute access or related). */
2050 wide_int wr[2];
2051 bool static_array = false;
2052 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
2053 {
2054 pref->parmarray = !static_array;
2055 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2056 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2057 pref->ref = ref;
2058 qry->put_ref (ptr, *pref);
2059 return true;
2060 }
2061
2062 pref->set_max_size_range ();
2063 pref->base0 = false;
2064 pref->ref = ptr;
2065 qry->put_ref (ptr, *pref);
2066 return true;
2067 }
2068
2069 if (gimple_code (stmt) == GIMPLE_PHI)
2070 {
2071 pref->ref = ptr;
2072 access_ref phi_ref = *pref;
2073 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
2074 return false;
2075 *pref = phi_ref;
2076 pref->ref = ptr;
2077 qry->put_ref (ptr, *pref);
2078 return true;
2079 }
2080
2081 if (!is_gimple_assign (stmt))
2082 {
2083 /* Clear BASE0 since the assigned pointer might point into
2084 the middle of the object, set the maximum size range and,
2085 if the SSA_NAME refers to a function argumnent, set
2086 PREF->REF to it. */
2087 pref->base0 = false;
2088 pref->set_max_size_range ();
2089 pref->ref = ptr;
2090 return true;
2091 }
2092
2093 tree_code code = gimple_assign_rhs_code (stmt);
2094
2095 if (code == MAX_EXPR || code == MIN_EXPR)
2096 {
31e924c5 2097 if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
2a837de2 2098 return false;
31e924c5 2099
2a837de2
MS
2100 qry->put_ref (ptr, *pref);
2101 return true;
2102 }
2103
2104 tree rhs = gimple_assign_rhs1 (stmt);
2105
2106 if (code == ASSERT_EXPR)
2107 {
2108 rhs = TREE_OPERAND (rhs, 0);
9a27acc3 2109 return compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry);
2a837de2
MS
2110 }
2111
2112 if (code == POINTER_PLUS_EXPR
2113 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
2114 {
2115 /* Compute the size of the object first. */
9a27acc3 2116 if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
2a837de2
MS
2117 return false;
2118
2119 offset_int orng[2];
2120 tree off = gimple_assign_rhs2 (stmt);
2121 if (get_offset_range (off, stmt, orng, rvals))
2122 pref->add_offset (orng[0], orng[1]);
2123 else
2124 pref->add_max_offset ();
ece28da9 2125
2a837de2
MS
2126 qry->put_ref (ptr, *pref);
2127 return true;
2128 }
2129
ece28da9
MS
2130 if (code == ADDR_EXPR || code == SSA_NAME)
2131 {
9a27acc3 2132 if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
ece28da9
MS
2133 return false;
2134 qry->put_ref (ptr, *pref);
2135 return true;
2136 }
2a837de2
MS
2137
2138 /* (This could also be an assignment from a nonlocal pointer.) Save
2139 PTR to mention in diagnostics but otherwise treat it as a pointer
2140 to an unknown object. */
2141 pref->ref = rhs;
2142 pref->base0 = false;
2143 pref->set_max_size_range ();
2144 return true;
2145 }
2146
2147 /* Assume all other expressions point into an unknown object
2148 of the maximum valid size. */
2149 pref->ref = ptr;
2150 pref->base0 = false;
2151 pref->set_max_size_range ();
2152 if (TREE_CODE (ptr) == SSA_NAME)
2153 qry->put_ref (ptr, *pref);
2154 return true;
2155}
2156
2157/* A "public" wrapper around the above. Clients should use this overload
2158 instead. */
2159
2160tree
9a27acc3
MS
2161compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2162 pointer_query *ptr_qry)
2a837de2
MS
2163{
2164 pointer_query qry;
9a27acc3
MS
2165 if (ptr_qry)
2166 ptr_qry->depth = 0;
2167 else
2168 ptr_qry = &qry;
820f0940
MS
2169
2170 /* Clear and invalidate in case *PREF is being reused. */
2171 pref->offrng[0] = pref->offrng[1] = 0;
2172 pref->sizrng[0] = pref->sizrng[1] = -1;
2173
2a837de2 2174 ssa_name_limit_t snlim;
9a27acc3 2175 if (!compute_objsize_r (ptr, stmt, ostype, pref, snlim, ptr_qry))
2a837de2
MS
2176 return NULL_TREE;
2177
2178 offset_int maxsize = pref->size_remaining ();
2179 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2180 pref->offrng[0] = 0;
2181 return wide_int_to_tree (sizetype, maxsize);
2182}
2183
2184/* Transitional wrapper. The function should be removed once callers
2185 transition to the pointer_query API. */
2186
2187tree
9a27acc3
MS
2188compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2189 range_query *rvals /* = NULL */)
2a837de2
MS
2190{
2191 pointer_query qry;
9a27acc3
MS
2192 qry.rvals = rvals;
2193 return compute_objsize (ptr, stmt, ostype, pref, &qry);
2a837de2
MS
2194}
2195
2196/* Legacy wrapper around the above. The function should be removed
2197 once callers transition to one of the two above. */
2198
2199tree
2200compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
2201 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2202{
2203 /* Set the initial offsets to zero and size to negative to indicate
2204 none has been computed yet. */
2205 access_ref ref;
9a27acc3 2206 tree size = compute_objsize (ptr, nullptr, ostype, &ref, rvals);
2a837de2
MS
2207 if (!size || !ref.base0)
2208 return NULL_TREE;
2209
2210 if (pdecl)
2211 *pdecl = ref.ref;
2212
2213 if (poff)
2214 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2215
2216 return size;
2217}
1ff4dbdd
MS
2218
/* Determine the offset *FLDOFF of the first byte of a struct member
   of TYPE (possibly recursively) into which the byte offset OFF points,
   starting after the field START_AFTER if it's non-null.  On success,
   if nonnull, set *FLDOFF to the offset of the first byte, and return
   the field decl.  If nonnull, set *NEXTOFF to the offset of the next
   field (which reflects any padding between the returned field and
   the next).  Otherwise, if no such member can be found, return null.  */

tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Point FLDOFF and NEXTOFF at local buffers when the caller didn't
     supply them so the code below can update them unconditionally.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset, computed lazily; -1 means not yet
     computed.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      /* Set NEXT_FLD to the next data member, skipping anything that
	 isn't a FIELD_DECL as well as compiler-created (artificial)
	 fields.  */
      next_fld = fld;
      do
	/* Advance to the next relevant data member. */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip FLD itself if it's not a relevant data member.  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.
	 Reuse the offset cached on the previous iteration if it's
	 available.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success.  (Using OFF as the size
	 makes the field extend at least up to the sought offset.)  */
      tree typesize = TYPE_SIZE_UNIT (fldtype);
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  next_pos = int_byte_position (next_fld);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      /* Accumulate the byte position of FLD into the cumulative
	 offset relative to the outermost enclosing type.  */
      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the next
	 next field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  /* When the scan ran off the end of the member list report the cached
     position of the last candidate's successor as the next offset.  */
  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
2335
2336/* Determine the offset *ELTOFF of the first byte of the array element
2337 of array ARTYPE into which the byte offset OFF points. On success
2338 set *ELTOFF to the offset of the first byte and return type.
2339 Otherwise, if no such element can be found, return null. */
2340
2341tree
2342array_elt_at_offset (tree artype, HOST_WIDE_INT off,
2343 HOST_WIDE_INT *eltoff /* = nullptr */,
2344 HOST_WIDE_INT *subar_size /* = nullptr */)
2345{
2346 gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);
2347
2348 HOST_WIDE_INT dummy;
2349 if (!eltoff)
2350 eltoff = &dummy;
2351 if (!subar_size)
2352 subar_size = &dummy;
2353
2354 tree eltype = artype;
2355 while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
2356 eltype = TREE_TYPE (eltype);
2357
2358 tree subartype = eltype;
2359 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
2360 || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
2361 eltype = TREE_TYPE (eltype);
2362
2363 *subar_size = int_size_in_bytes (subartype);
2364
2365 if (eltype == artype)
2366 {
2367 *eltoff = 0;
2368 return artype;
2369 }
2370
2371 HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
2372 HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);
2373
2374 if (off < artype_size)// * eltype_size)
2375 {
2376 *eltoff = (off / eltype_size) * eltype_size;
2377 return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
2378 }
2379
2380 return NULL_TREE;
2381}
ea9e0d6c
MS
2382
2383/* Wrapper around build_array_type_nelts that makes sure the array
2384 can be created at all and handles zero sized arrays specially. */
2385
2386tree
2387build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
2388{
2389 if (TYPE_SIZE_UNIT (eltype)
2390 && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
2391 && !integer_zerop (TYPE_SIZE_UNIT (eltype))
2392 && TYPE_ALIGN_UNIT (eltype) > 1
2393 && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
2394 ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
2395 eltype = TYPE_MAIN_VARIANT (eltype);
2396
2397 /* Consider excessive NELTS an array of unknown bound. */
2398 tree idxtype = NULL_TREE;
2399 if (nelts < HOST_WIDE_INT_MAX)
2400 {
2401 if (nelts)
2402 return build_array_type_nelts (eltype, nelts);
2403 idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
2404 }
2405
2406 tree arrtype = build_array_type (eltype, idxtype);
2407 arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
2408 TYPE_SIZE (arrtype) = bitsize_zero_node;
2409 TYPE_SIZE_UNIT (arrtype) = size_zero_node;
2410 return arrtype;
2411}