]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/pointer-query.cc
Don't build readline/libreadline.a, when --with-system-readline is supplied
[thirdparty/gcc.git] / gcc / pointer-query.cc
CommitLineData
2a837de2
MS
1/* Definitions of the pointer_query and related classes.
2
7adcbafe 3 Copyright (C) 2020-2022 Free Software Foundation, Inc.
2a837de2
MS
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
2a837de2 25#include "tree.h"
2a837de2 26#include "gimple.h"
2a837de2
MS
27#include "stringpool.h"
28#include "tree-vrp.h"
2a837de2 29#include "diagnostic-core.h"
2a837de2 30#include "fold-const.h"
2a837de2
MS
31#include "tree-object-size.h"
32#include "tree-ssa-strlen.h"
2a837de2 33#include "langhooks.h"
2a837de2
MS
34#include "stringpool.h"
35#include "attribs.h"
ba206889 36#include "gimple-iterator.h"
2a837de2 37#include "gimple-fold.h"
ece28da9 38#include "gimple-ssa.h"
2a837de2 39#include "intl.h"
2a837de2
MS
40#include "attr-fnspec.h"
41#include "gimple-range.h"
2a837de2 42#include "pointer-query.h"
ece28da9
MS
43#include "tree-pretty-print.h"
44#include "tree-ssanames.h"
54fa5567 45#include "target.h"
2a837de2 46
1334d889 47static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
9a27acc3 48 ssa_name_limit_t &, pointer_query *);
2a837de2
MS
49
50/* Wrapper around the wide_int overload of get_range that accepts
51 offset_int instead. For middle end expressions returns the same
52 result. For a subset of nonconstamt expressions emitted by the front
53 end determines a more precise range than would be possible otherwise. */
54
55static bool
56get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
57{
58 offset_int add = 0;
59 if (TREE_CODE (x) == PLUS_EXPR)
60 {
61 /* Handle constant offsets in pointer addition expressions seen
62 n the front end IL. */
63 tree op = TREE_OPERAND (x, 1);
64 if (TREE_CODE (op) == INTEGER_CST)
65 {
66 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
67 add = wi::to_offset (op);
68 x = TREE_OPERAND (x, 0);
69 }
70 }
71
72 if (TREE_CODE (x) == NOP_EXPR)
73 /* Also handle conversions to sizetype seen in the front end IL. */
74 x = TREE_OPERAND (x, 0);
75
76 tree type = TREE_TYPE (x);
77 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
78 return false;
79
80 if (TREE_CODE (x) != INTEGER_CST
81 && TREE_CODE (x) != SSA_NAME)
82 {
83 if (TYPE_UNSIGNED (type)
84 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
85 type = signed_type_for (type);
86
87 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
88 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
89 return x;
90 }
91
92 wide_int wr[2];
93 if (!get_range (x, stmt, wr, rvals))
94 return false;
95
96 signop sgn = SIGNED;
97 /* Only convert signed integers or unsigned sizetype to a signed
98 offset and avoid converting large positive values in narrower
99 types to negative offsets. */
100 if (TYPE_UNSIGNED (type)
101 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
102 sgn = UNSIGNED;
103
104 r[0] = offset_int::from (wr[0], sgn);
105 r[1] = offset_int::from (wr[1], sgn);
106 return true;
107}
108
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  SNLIM bounds the traversal of SSA names and
   QRY supplies (and caches) range and object-size information.  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  ssa_name_limit_t &snlim, pointer_query *qry)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")          // ordinary form
	  && !id_equal (fname, "_ZnwmPv")       // ordinary form
	  && !id_equal (fname, "_ZnajPv")       // array form
	  && !id_equal (fname, "_ZnamPv"))      // array form
	return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Placement new returns its second (pointer) argument, with
	 the result pointing at the beginning of the object.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	tree off = gimple_call_arg (stmt, 2);
	/* A nonnull memchr result points at one of the first N bytes,
	   so the offset is at most N - 1.  */
	if (get_offset_range (off, stmt, offrng, qry->rvals))
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result points somewhere into the first argument but the
	 offset into it isn't known.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* The result points at the terminating nul, at most source
	   size - 1 bytes past the first argument.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, qry->rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpncpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
281
b48d4e68
MS
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of an object, or a valid size argument to an allocation function
   declared with attribute alloc_size (whose argument may be signed),
   or to a string manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  QUERY is
   the range query to use (null for the global ranges) and STMT the
   statement EXP is used in.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  /* Fall back on the global range query when the caller didn't
     supply one.  */
  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      value_range vr;

      query->range_of_expr (vr, exp, stmt);

      if (vr.undefined_p ())
	vr.set_varying (TREE_TYPE (exp));
      range_type = vr.kind ();
      min = wi::to_wide (vr.min ());
      max = wi::to_wide (vr.max ());
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      /* Otherwise use the smaller (non-negative) subrange
		 [0, MIN - 1].  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
433
434bool
435get_size_range (tree exp, tree range[2], int flags /* = 0 */)
436{
437 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
438}
439
2a837de2
MS
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses QRY for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query *qry /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  /* Zero-based indexes of the size and (optional) count arguments;
     UINT_MAX means not yet determined/not present.  */
  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      /* __builtin_alloca_with_align takes the size as its first
	 argument.  */
      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Convert the attribute's one-based argument positions to
	 zero-based indexes, validating them against the call.  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With no count argument (ARGIDX2 == UINT_MAX > NARGS) a constant
     size can be returned directly.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
543
/* For an access to an object referenced by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current
   function.  Set STATIC_ARRAY if the array parameter has been declared
   [static].  Return the function parameter on success and null
   otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
599
f9379fcb
MS
/* Initialize the object to an "unknown" state: no referenced tree,
   an identity EVAL callback, no levels of indirection, and (per the
   member initializers) an assumed zero-based offset.  */

access_ref::access_ref ()
  : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
    base0 (true), parmarray ()
{
  /* Set the offset and the offset extrema to valid (zero) values.  */
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  /* Invalidate the size range; a negative lower bound means the size
     hasn't been determined yet.  */
  sizrng[0] = sizrng[1] = -1;
}
612
613/* Return the PHI node REF refers to or null if it doesn't. */
614
615gphi *
616access_ref::phi () const
617{
618 if (!ref || TREE_CODE (ref) != SSA_NAME)
619 return NULL;
620
621 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
ece28da9 622 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
2a837de2
MS
623 return NULL;
624
625 return as_a <gphi *> (def_stmt);
626}
627
10d185b9
MS
/* Determine the size and offset for ARG, append it to ALL_REFS, and
   merge the result with *THIS.  Ignore ARG if SKIP_NULL is set and
   ARG refers to the null pointer.  On failure to determine the object
   *THIS is set to the most permissive state.  */

void
access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
		       int ostype, bool skip_null,
		       ssa_name_limit_t &snlim, pointer_query &qry)
{
  access_ref aref;
  if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
      || aref.sizrng[0] < 0)
    {
      /* This may be a PHI with all null pointer arguments.  Handle it
	 conservatively by setting all properties to the most permissive
	 values.  */
      base0 = false;
      offrng[0] = offrng[1] = 0;
      add_max_offset ();
      set_max_size_range ();
      return;
    }

  if (all_refs)
    {
      access_ref dummy_ref;
      aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);
    }

  /* Cache the result for the SSA_NAME so subsequent queries are
     cheap.  */
  if (TREE_CODE (arg) == SSA_NAME)
    qry.put_ref (arg, aref, ostype);

  if (all_refs)
    all_refs->safe_push (aref);

  aref.deref += deref;

  bool merged_parmarray = aref.parmarray;

  const bool nullp = skip_null && integer_zerop (arg);
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  offset_int minsize = sizrng[0];

  if (sizrng[0] < 0)
    {
      /* If *THIS doesn't contain a meaningful result yet set it to AREF
	 unless the argument is null and it's okay to ignore it.  */
      if (!nullp)
	*this = aref;

      /* Set if the current argument refers to one or more objects of
	 known size (or range of sizes), as opposed to referring to
	 one or more unknown object(s).  */
      const bool arg_known_size = (aref.sizrng[0] != 0
				   || aref.sizrng[1] != maxobjsize);
      if (arg_known_size)
	sizrng[0] = aref.sizrng[0];

      return;
    }

  /* Disregard null pointers in PHIs with two or more arguments.
     TODO: Handle this better!  */
  if (nullp)
    return;

  const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);

  if (known_size && aref.sizrng[0] < minsize)
    minsize = aref.sizrng[0];

  /* Extend the size and offset of *THIS to account for AREF.  The result
     can be cached but results in false negatives.  */

  /* Keep the offset range of whichever of *THIS and AREF has the larger
     upper size bound, and widen it by the other's offset range.  */
  offset_int orng[2];
  if (sizrng[1] < aref.sizrng[1])
    {
      orng[0] = offrng[0];
      orng[1] = offrng[1];
      *this = aref;
    }
  else
    {
      orng[0] = aref.offrng[0];
      orng[1] = aref.offrng[1];
    }

  if (orng[0] < offrng[0])
    offrng[0] = orng[0];
  if (offrng[1] < orng[1])
    offrng[1] = orng[1];

  /* Reset the PHI's BASE0 flag if any of the nonnull arguments
     refers to an object at an unknown offset.  */
  if (!aref.base0)
    base0 = false;

  sizrng[0] = minsize;
  parmarray = merged_parmarray;

  return;
}
731
820f0940
MS
732/* Determine and return the largest object to which *THIS refers. If
733 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
734 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
735 argument ARG. */
2a837de2
MS
736
737tree
738access_ref::get_ref (vec<access_ref> *all_refs,
739 access_ref *pref /* = NULL */,
740 int ostype /* = 1 */,
741 ssa_name_limit_t *psnlim /* = NULL */,
742 pointer_query *qry /* = NULL */) const
743{
10d185b9
MS
744 if (!ref || TREE_CODE (ref) != SSA_NAME)
745 return NULL;
2a837de2
MS
746
747 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
748 cause unbounded recursion. */
749 ssa_name_limit_t snlim_buf;
750 if (!psnlim)
751 psnlim = &snlim_buf;
752
2a837de2
MS
753 pointer_query empty_qry;
754 if (!qry)
755 qry = &empty_qry;
756
10d185b9
MS
757 if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
758 {
759 if (is_gimple_assign (def_stmt))
760 {
761 tree_code code = gimple_assign_rhs_code (def_stmt);
762 if (code != MIN_EXPR && code != MAX_EXPR)
763 return NULL_TREE;
764
765 access_ref aref;
766 tree arg1 = gimple_assign_rhs1 (def_stmt);
58ec0964
MS
767 aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
768 *psnlim, *qry);
10d185b9
MS
769
770 tree arg2 = gimple_assign_rhs2 (def_stmt);
58ec0964
MS
771 aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
772 *psnlim, *qry);
10d185b9
MS
773
774 if (pref && pref != this)
775 {
776 tree ref = pref->ref;
777 *pref = aref;
778 pref->ref = ref;
779 }
780
781 return aref.ref;
782 }
783 }
784 else
785 return NULL_TREE;
786
787 gphi *phi_stmt = this->phi ();
788 if (!phi_stmt)
789 return ref;
790
791 if (!psnlim->visit_phi (ref))
792 return NULL_TREE;
793
820f0940
MS
794 /* The conservative result of the PHI reflecting the offset and size
795 of the largest PHI argument, regardless of whether or not they all
796 refer to the same object. */
2a837de2
MS
797 access_ref phi_ref;
798 if (pref)
799 {
820f0940
MS
800 /* The identity of the object has not been determined yet but
801 PREF->REF is set by the caller to the PHI for convenience.
802 The size is negative/invalid and the offset is zero (it's
803 updated only after the identity of the object has been
804 established). */
805 gcc_assert (pref->sizrng[0] < 0);
806 gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
807
2a837de2 808 phi_ref = *pref;
2a837de2
MS
809 }
810
58ec0964 811 const offset_int maxobjsize = wi::to_offset (max_object_size ());
2a837de2
MS
812 const unsigned nargs = gimple_phi_num_args (phi_stmt);
813 for (unsigned i = 0; i < nargs; ++i)
814 {
815 access_ref phi_arg_ref;
10d185b9 816 bool skip_null = i || i + 1 < nargs;
2a837de2 817 tree arg = gimple_phi_arg_def (phi_stmt, i);
58ec0964
MS
818 phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
819 *psnlim, *qry);
820
821 if (!phi_ref.base0
822 && phi_ref.sizrng[0] == 0
823 && phi_ref.sizrng[1] >= maxobjsize)
824 /* When an argument results in the most permissive result,
825 the remaining arguments cannot constrain it. Short-circuit
826 the evaluation. */
827 break;
2a837de2
MS
828 }
829
2a837de2
MS
830 if (phi_ref.sizrng[0] < 0)
831 {
832 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
833 (perhaps because they have all been already visited by prior
834 recursive calls). */
835 psnlim->leave_phi (ref);
836 return NULL_TREE;
837 }
838
839 /* Avoid changing *THIS. */
840 if (pref && pref != this)
10d185b9
MS
841 {
842 /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
843 can be referred to later if necessary. This is useful even if
844 they all refer to the same object. */
845 tree ref = pref->ref;
846 *pref = phi_ref;
847 pref->ref = ref;
848 }
2a837de2
MS
849
850 psnlim->leave_phi (ref);
851
852 return phi_ref.ref;
853}
854
855/* Return the maximum amount of space remaining and if non-null, set
856 argument to the minimum. */
857
858offset_int
859access_ref::size_remaining (offset_int *pmin /* = NULL */) const
860{
861 offset_int minbuf;
862 if (!pmin)
863 pmin = &minbuf;
864
820f0940
MS
865 if (sizrng[0] < 0)
866 {
867 /* If the identity of the object hasn't been determined return
868 the maximum size range. */
869 *pmin = 0;
870 return wi::to_offset (max_object_size ());
871 }
872
2a837de2
MS
873 /* add_offset() ensures the offset range isn't inverted. */
874 gcc_checking_assert (offrng[0] <= offrng[1]);
875
876 if (base0)
877 {
878 /* The offset into referenced object is zero-based (i.e., it's
879 not referenced by a pointer into middle of some unknown object). */
880 if (offrng[0] < 0 && offrng[1] < 0)
881 {
882 /* If the offset is negative the remaining size is zero. */
883 *pmin = 0;
884 return 0;
885 }
886
887 if (sizrng[1] <= offrng[0])
888 {
889 /* If the starting offset is greater than or equal to the upper
890 bound on the size of the object, the space remaining is zero.
891 As a special case, if it's equal, set *PMIN to -1 to let
892 the caller know the offset is valid and just past the end. */
893 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
894 return 0;
895 }
896
897 /* Otherwise return the size minus the lower bound of the offset. */
898 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
899
900 *pmin = sizrng[0] - or0;
901 return sizrng[1] - or0;
902 }
903
904 /* The offset to the referenced object isn't zero-based (i.e., it may
905 refer to a byte other than the first. The size of such an object
906 is constrained only by the size of the address space (the result
907 of max_object_size()). */
908 if (sizrng[1] <= offrng[0])
909 {
910 *pmin = 0;
911 return 0;
912 }
913
914 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
915
916 *pmin = sizrng[0] - or0;
917 return sizrng[1] - or0;
918}
919
920/* Return true if the offset and object size are in range for SIZE. */
921
922bool
923access_ref::offset_in_range (const offset_int &size) const
924{
925 if (size_remaining () < size)
926 return false;
927
928 if (base0)
929 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
930
931 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
932 return offmax[0] > -maxoff && offmax[1] < maxoff;
933}
934
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Update the minimum and maximum offsets computed so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
1015
/* Issue one inform message describing each target of an access REF.
   MODE is the access mode (access_read_only, access_write_only, or
   access_read_write) and selects the wording of the notes below;
   OSTYPE is the Object Size Type used when sizing the targets of
   a PHI.  */

void
access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
{
  const access_ref &aref = *this;
  if (!aref.ref)
    return;

  if (phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
	 with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref, ostype))
	return;

      if (all_refs.length ())
	{
	  /* Except for MAXREF, the rest of the arguments' offsets need not
	     reflect one added to the PHI itself.  Determine the latter from
	     MAXREF on which the result is based.  */
	  const offset_int orng[] =
	    {
	      offrng[0] - maxref.offrng[0],
	      wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
	    };

	  /* Add the final PHI's offset to that of each of the arguments
	     and recurse to issue an inform message for it.  */
	  for (unsigned i = 0; i != all_refs.length (); ++i)
	    {
	      /* Skip any PHIs; those could lead to infinite recursion.  */
	      if (all_refs[i].phi ())
		continue;

	      all_refs[i].add_offset (orng[0], orng[1]);
	      all_refs[i].inform_access (mode, ostype);
	    }
	  return;
	}
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  /* Format the offset range; an empty OFFSTR suppresses the
     "at offset ..." clause in the notes below.  */
  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (!stmt)
	return;

      if (is_gimple_call (stmt))
	{
	  loc = gimple_location (stmt);
	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	    {
	      /* Strip the SSA_NAME suffix from the variable name and
		 recreate an identifier with the VLA's original name.  */
	      ref = gimple_call_lhs (stmt);
	      if (SSA_NAME_IDENTIFIER (ref))
		{
		  ref = SSA_NAME_IDENTIFIER (ref);
		  const char *id = IDENTIFIER_POINTER (ref);
		  size_t len = strcspn (id, ".$");
		  if (!len)
		    len = strlen (id);
		  ref = get_identifier_with_length (id, len);
		}
	    }
	  else
	    {
	      /* Except for VLAs, retrieve the allocation function.  */
	      allocfn = gimple_call_fndecl (stmt);
	      if (!allocfn)
		allocfn = gimple_call_fn (stmt);
	      if (TREE_CODE (allocfn) == SSA_NAME)
		{
		  /* For an ALLOC_CALL via a function pointer make a small
		     effort to determine the destination of the pointer.  */
		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
		  if (gimple_assign_single_p (def))
		    {
		      tree rhs = gimple_assign_rhs1 (def);
		      if (DECL_P (rhs))
			allocfn = rhs;
		      else if (TREE_CODE (rhs) == COMPONENT_REF)
			allocfn = TREE_OPERAND (rhs, 1);
		    }
		}
	    }
	}
      else if (gimple_nop_p (stmt))
	/* Handle DECL_PARM below.  */
	ref = SSA_NAME_VAR (ref);
      else if (is_gimple_assign (stmt)
	       && (gimple_assign_rhs_code (stmt) == MIN_EXPR
		   || gimple_assign_rhs_code (stmt) == MAX_EXPR))
	{
	  /* MIN or MAX_EXPR here implies a reference to a known object
	     and either an unknown or distinct one (the latter being
	     the result of an invalid relational expression).  Determine
	     the identity of the former and point to it in the note.
	     TODO: Consider merging with PHI handling.  */
	  access_ref arg_ref[2];
	  tree arg = gimple_assign_rhs1 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
	  arg = gimple_assign_rhs2 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);

	  /* Use the argument that references a known object with more
	     space remaining.  */
	  const bool idx
	    = (!arg_ref[0].ref || !arg_ref[0].base0
	       || (arg_ref[0].base0 && arg_ref[1].base0
		   && (arg_ref[0].size_remaining ()
		       < arg_ref[1].size_remaining ())));

	  arg_ref[idx].offrng[0] = offrng[0];
	  arg_ref[idx].offrng[1] = offrng[1];
	  arg_ref[idx].inform_access (mode);
	  return;
	}
    }

  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
	   && TREE_CODE (ref) != SSA_NAME)
    return;

  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into destination object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "destination object %qE of size %s", ref, sizestr);
	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into destination object of size %s "
		"allocated by %qE", offstr, sizestr, allocfn);
      else
	inform (loc, "destination object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  if (mode == access_read_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into source object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "source object %qE of size %s", ref, sizestr);

	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into source object of size %s allocated by %qE",
		offstr, sizestr, allocfn);
      else
	inform (loc, "source object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  /* Neither a pure read nor a write: describe the object generically.  */
  if (allocfn == NULL_TREE)
    {
      if (*offstr)
	inform (loc, "at offset %s into object %qE of size %s",
		offstr, ref, sizestr);
      else
	inform (loc, "object %qE of size %s", ref, sizestr);

      return;
    }

  if (*offstr)
    inform (loc,
	    "at offset %s into object of size %s allocated by %qE",
	    offstr, sizestr, allocfn);
  else
    inform (loc, "object of size %s allocated by %qE",
	    sizestr, allocfn);
}
1262
6dfb1059
MS
/* Dump *THIS to FILE as a single line: the dereference depth, the
   referenced object (or the PHI arguments), the offset range, and
   the size range.  */

void
access_ref::dump (FILE *file) const
{
  /* Negative values of DEREF print '&' (address-of), positive
     values print '*' (dereference).  */
  for (int i = deref; i < 0; ++i)
    fputc ('&', file);

  for (int i = 0; i < deref; ++i)
    fputc ('*', file);

  if (gphi *phi_stmt = phi ())
    {
      fputs ("PHI <", file);
      unsigned nargs = gimple_phi_num_args (phi_stmt);
      for (unsigned i = 0; i != nargs; ++i)
	{
	  tree arg = gimple_phi_arg_def (phi_stmt, i);
	  print_generic_expr (file, arg);
	  if (i + 1 < nargs)
	    fputs (", ", file);
	}
      fputc ('>', file);
    }
  else
    print_generic_expr (file, ref);

  /* Print the offset only when it's a proper range or nonzero.
     NOTE(review): for a negative constant offset this prints both
     the '-' sign character and the (negative) value -- confirm the
     double sign is intended.  */
  if (offrng[0] != offrng[1])
    fprintf (file, " + [%lli, %lli]",
	     (long long) offrng[0].to_shwi (),
	     (long long) offrng[1].to_shwi ());
  else if (offrng[0] != 0)
    fprintf (file, " %c %lli",
	     offrng[0] < 0 ? '-' : '+',
	     (long long) offrng[0].to_shwi ());

  if (base0)
    fputs (" (base0)", file);

  fputs ("; size: ", file);
  if (sizrng[0] != sizrng[1])
    {
      offset_int maxsize = wi::to_offset (max_object_size ());
      /* A [0, max_object_size] range conveys no information.  */
      if (sizrng[0] == 0 && sizrng[1] >= maxsize)
	fputs ("unknown", file);
      else
	fprintf (file, "[%llu, %llu]",
		 (unsigned long long) sizrng[0].to_uhwi (),
		 (unsigned long long) sizrng[1].to_uhwi ());
    }
  else if (sizrng[0] != 0)
    fprintf (file, "%llu",
	     (unsigned long long) sizrng[0].to_uhwi ());

  fputc ('\n', file);
}
1319
f9379fcb
MS
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  STMT is the statement
   performing the access; QUERY is the range query used to determine the
   bounds of the size expressions.  */

access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
{
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1332
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  EXPR is the call
   expression; no statement is available in this overload.  */

access_data::access_data (range_query *query, tree expr, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
{
  /* The STMT member was value-initialized to null above, so SET_BOUND
     is invoked with a null statement here.  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
1345
1346/* Set BNDRNG to the range of BOUND for the statement STMT. */
1347
1348void
1349access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
1350 range_query *query, gimple *stmt)
1351{
1352 /* Set the default bounds of the access and adjust below. */
1353 bndrng[0] = minaccess ? 1 : 0;
1354 bndrng[1] = HOST_WIDE_INT_M1U;
1355
1356 /* When BOUND is nonnull and a range can be extracted from it,
1357 set the bounds of the access to reflect both it and MINACCESS.
1358 BNDRNG[0] is the size of the minimum access. */
1359 tree rng[2];
1360 if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
1361 {
1362 bndrng[0] = wi::to_offset (rng[0]);
1363 bndrng[1] = wi::to_offset (rng[1]);
1364 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
1365 }
1366}
1367
2a837de2
MS
1368/* Set a bit for the PHI in VISITED and return true if it wasn't
1369 already set. */
1370
1371bool
1372ssa_name_limit_t::visit_phi (tree ssa_name)
1373{
1374 if (!visited)
1375 visited = BITMAP_ALLOC (NULL);
1376
1377 /* Return false if SSA_NAME has already been visited. */
1378 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1379}
1380
/* Clear the bit for SSA_NAME's PHI in VISITED so the PHI may be
   entered again on a different walk.  Assumes VISITED was allocated
   by a prior visit_phi call -- TODO confirm all callers pair the two.
   (An earlier comment here, copied from visit_phi, spoke of returning
   false; this function returns nothing.)  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1389
/* Return false if the SSA_NAME chain length budget has been exhausted,
   otherwise consume one unit of the remaining budget (decrementing
   SSA_DEF_MAX) and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return false to let the caller avoid recursing beyond the
     specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}
1404
/* If SSA_NAME's PHI has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited
       (visit_phi returns false in that case).  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
	&& !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}
1429
/* Release the bitmap of visited PHIs, if one was ever allocated.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
1435
/* Default ctor.  Initialize the object with a pointer to the range_query
   instance to use, or null.  All statistics counters start at zero and
   both levels of the cache start out empty.  */

pointer_query::pointer_query (range_query *qry /* = NULL */)
  : rvals (qry), hits (), misses (), failures (), depth (), max_depth (),
    var_cache ()
{
  /* No op.  */
}
1445
/* Return a pointer to the cached access_ref instance for the SSA_NAME
   PTR if it's there, or null otherwise.  Updates the HITS/MISSES
   counters as a side effect.  */

const access_ref *
pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
{
  unsigned version = SSA_NAME_VERSION (ptr);
  /* The level-1 index combines the SSA version with the low bit of
     the Object Size Type.  */
  unsigned idx = version << 1 | (ostype & 1);
  if (var_cache.indices.length () <= idx)
    {
      ++misses;
      return NULL;
    }

  /* The stored value is used directly as the index into ACCESS_REFS,
     matching the way put_ref stores entries; zero (never a stored
     index) denotes an unused slot and fails the REF check below.  */
  unsigned cache_idx = var_cache.indices[idx];
  if (var_cache.access_refs.length () <= cache_idx)
    {
      ++misses;
      return NULL;
    }

  const access_ref &cache_ref = var_cache.access_refs[cache_idx];
  if (cache_ref.ref)
    {
      ++hits;
      return &cache_ref;
    }

  ++misses;
  return NULL;
}
1477
1478/* Retrieve the access_ref instance for a variable from the cache if it's
1479 there or compute it and insert it into the cache if it's nonnonull. */
1480
1481bool
1334d889
MS
1482pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
1483 int ostype /* = 1 */)
2a837de2
MS
1484{
1485 const unsigned version
1486 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1487
68e9b7b6 1488 if (version)
2a837de2
MS
1489 {
1490 unsigned idx = version << 1 | (ostype & 1);
68e9b7b6 1491 if (idx < var_cache.indices.length ())
2a837de2 1492 {
68e9b7b6
MS
1493 unsigned cache_idx = var_cache.indices[idx] - 1;
1494 if (cache_idx < var_cache.access_refs.length ()
1495 && var_cache.access_refs[cache_idx].ref)
2a837de2
MS
1496 {
1497 ++hits;
68e9b7b6 1498 *pref = var_cache.access_refs[cache_idx];
2a837de2
MS
1499 return true;
1500 }
1501 }
1502
1503 ++misses;
1504 }
1505
9a27acc3 1506 if (!compute_objsize (ptr, stmt, ostype, pref, this))
2a837de2
MS
1507 {
1508 ++failures;
1509 return false;
1510 }
1511
1512 return true;
1513}
1514
/* Add a copy of the access_ref REF for the SSA_NAME PTR to the cache
   if REF is populated and valid.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     NOTE(review): the code below uses the stored value directly as
     the index into ACCESS_REFS (so slot 0 is never written and zero
     can serve as the "no entry" marker); an earlier comment claimed
     the value minus one is the index.  Confirm that every reader of
     INDICES agrees with this writer on which convention is meant.  */
  if (var_cache.indices.length () <= idx)
    var_cache.indices.safe_grow_cleared (idx + 1);

  if (!var_cache.indices[idx])
    var_cache.indices[idx] = var_cache.access_refs.length () + 1;

  /* Grow the ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  Once nonnull, the REF value must stay
     unchanged.  */
  unsigned cache_idx = var_cache.indices[idx];
  if (var_cache.access_refs.length () <= cache_idx)
    var_cache.access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache.access_refs[cache_idx];
  if (cache_ref.ref)
    {
      /* The cached entry for PTR must always refer to the same tree.  */
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
1554
/* Flush the cache, releasing the storage of both levels.  The
   statistics counters (hits, misses, etc.) are not reset.  */

void
pointer_query::flush_cache ()
{
  var_cache.indices.release ();
  var_cache.access_refs.release ();
}
1563
ece28da9
MS
/* Dump statistics and, optionally, cache contents to DUMP_FILE.  */

void
pointer_query::dump (FILE *dump_file, bool contents /* = false */)
{
  /* Count the used level-1 slots (NUSED) and the valid level-2
     entries (NREFS) for the summary printed below.  */
  unsigned nused = 0, nrefs = 0;
  unsigned nidxs = var_cache.indices.length ();
  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
	continue;

      ++nused;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
	continue;

      ++nrefs;
    }

  fprintf (dump_file, "pointer_query counters:\n"
	   " index cache size: %u\n"
	   " index entries: %u\n"
	   " access cache size: %u\n"
	   " access entries: %u\n"
	   " hits: %u\n"
	   " misses: %u\n"
	   " failures: %u\n"
	   " max_depth: %u\n",
	   nidxs, nused,
	   var_cache.access_refs.length (), nrefs,
	   hits, misses, failures, max_depth);

  if (!contents || !nidxs)
    return;

  fputs ("\npointer_query cache contents:\n", dump_file);

  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
	continue;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
	continue;

      /* The level-1 cache index corresponds to the SSA_NAME_VERSION
	 shifted left by one and ORed with the Object Size Type in
	 the lowest bit.  Print the two separately.  */
      unsigned ver = i >> 1;
      unsigned ost = i & 1;

      fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
      if (tree name = ssa_name (ver))
	{
	  print_generic_expr (dump_file, name);
	  fputs (" = ", dump_file);
	}
      else
	/* The SSA_NAME for this version is no longer available.  */
	fprintf (dump_file, " _%u = ", ver);

      aref.dump (dump_file);
    }

  fputc ('\n', dump_file);
}
1634
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   set PREF->REF to the operand with more or less space remaining,
   respectively, if both refer to the same (sub)object, or to PTR if they
   might not, and return true.  Otherwise, if the identity of neither
   operand can be determined, return false.  */

static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
    {
      /* The identity of the first operand is unknown: assume an
	 unknown base with the maximum possible offset and size.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining,
	     whichever is appropriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];

	  if (aref[i0].ref != aref[i1].ref)
	    /* If the operands don't refer to the same (sub)object set
	       PREF->REF to the SSA_NAME from which STMT was obtained
	       so that both can be identified in a diagnostic.  */
	    pref->ref = ptr;

	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PTR->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
1727
1334d889
MS
1728/* A helper of compute_objsize_r() to determine the size of a DECL.
1729 Return true on success and (possibly in the future) false on failure. */
1730
1731static bool
1732handle_decl (tree decl, bool addr, access_ref *pref)
1733{
1734 tree decl_type = TREE_TYPE (decl);
1735
1736 pref->ref = decl;
1737
1738 /* Reset the offset in case it was set by a prior call and not
1739 cleared by the caller. The offset is only adjusted after
1740 the identity of the object has been determined. */
1741 pref->offrng[0] = pref->offrng[1] = 0;
1742
1743 if (!addr && POINTER_TYPE_P (decl_type))
1744 {
1745 /* Set the maximum size if the reference is to the pointer
1746 itself (as opposed to what it points to), and clear
1747 BASE0 since the offset isn't necessarily zero-based. */
1748 pref->set_max_size_range ();
1749 pref->base0 = false;
1750 return true;
1751 }
1752
1753 /* Valid offsets into the object are nonnegative. */
1754 pref->base0 = true;
1755
1756 if (tree size = decl_init_size (decl, false))
1757 if (TREE_CODE (size) == INTEGER_CST)
1758 {
1759 pref->sizrng[0] = wi::to_offset (size);
1760 pref->sizrng[1] = pref->sizrng[0];
1761 return true;
1762 }
1763
1764 pref->set_max_size_range ();
1765 return true;
1766}
1767
2a837de2
MS
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
   on success and false on failure.  */

static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
		  access_ref *pref, ssa_name_limit_t &snlim,
		  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the base object first; its size bounds the result.  */
  if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (TREE_CODE (lowbnd) == INTEGER_CST && !integer_zerop (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain (0 in C/C++,
	 1 in Fortran and anything in Ada) by applying the same processing
	 as in get_offset_range.  */
      const wide_int wlb = wi::to_wide (lowbnd);
      signop sgn = SIGNED;
      if (TYPE_UNSIGNED (TREE_TYPE (lowbnd))
	  && wlb.get_precision () < TYPE_PRECISION (sizetype))
	sgn = UNSIGNED;
      const offset_int lb = offset_int::from (wlb, sgn);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* The element size isn't constant: give up on a precise offset
	 and assume the maximum.  */
      pref->add_max_offset ();
      return true;
    }

  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten the size bounds; never grow past the base
	 object's size.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1851
1334d889
MS
/* Given a COMPONENT_REF CREF, set *PREF's size to the size of the
   referenced member, with special handling for trailing arrays.  */

static void
set_component_ref_size (tree cref, access_ref *pref)
{
  const tree base = TREE_OPERAND (cref, 0);
  const tree base_type = TREE_TYPE (base);

  /* SAM is set for array members that might need special treatment.  */
  special_array_member sam;
  tree size = component_ref_size (cref, &sam);
  if (sam == special_array_member::int_0)
    /* Interior zero-length array: treat as having zero size.  */
    pref->sizrng[0] = pref->sizrng[1] = 0;
  else if (!pref->trail1special && sam == special_array_member::trail_1)
    /* Trailing one-element array, unless the caller asked for the
       special treatment to be suppressed.  */
    pref->sizrng[0] = pref->sizrng[1] = 1;
  else if (size && TREE_CODE (size) == INTEGER_CST)
    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
  else
    {
      /* When the size of the member is unknown it's either a flexible
	 array member or a trailing special array member (either zero
	 length or one-element).  Set the size to the maximum minus
	 the constant size of the base object's type.  */
      pref->sizrng[0] = 0;
      pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      if (tree base_size = TYPE_SIZE_UNIT (base_type))
	if (TREE_CODE (base_size) == INTEGER_CST)
	  pref->sizrng[1] -= wi::to_offset (base_size);
    }
}
1883
/* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
   CREF.  Return true on success and false on failure.  */

static bool
handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
		      access_ref *pref, ssa_name_limit_t &snlim,
		      pointer_query *qry)
{
  gcc_assert (TREE_CODE (cref) == COMPONENT_REF);

  const tree base = TREE_OPERAND (cref, 0);
  const tree field = TREE_OPERAND (cref, 1);
  access_ref base_ref = *pref;

  /* Unconditionally determine the size of the base object (it could
     be smaller than the referenced member when the object is stored
     in a buffer with an insufficient size).  Note OSTYPE zero is
     used for the base regardless of the caller's OSTYPE.  */
  if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry))
    return false;

  /* Add the offset of the member to the offset into the object computed
     so far.  */
  tree offset = byte_position (field);
  if (TREE_CODE (offset) == INTEGER_CST)
    base_ref.add_offset (wi::to_offset (offset));
  else
    base_ref.add_max_offset ();

  if (!base_ref.ref)
    /* PREF->REF may have been already set to an SSA_NAME earlier
       to provide better context for diagnostics.  In that case,
       leave it unchanged.  */
    base_ref.ref = base;

  const tree base_type = TREE_TYPE (base);
  if (TREE_CODE (base_type) == UNION_TYPE)
    /* In accesses through union types consider the entire unions
       rather than just their members.  */
    ostype = 0;

  if (ostype == 0)
    {
      /* In OSTYPE zero (for raw memory functions like memcpy), use
	 the maximum size instead if the identity of the enclosing
	 object cannot be determined.  */
      *pref = base_ref;
      return true;
    }

  pref->ref = field;

  if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
    {
      /* Set maximum size if the reference is to the pointer member
	 itself (as opposed to what it points to).  */
      pref->set_max_size_range ();
      return true;
    }

  set_component_ref_size (cref, pref);

  if (base_ref.size_remaining () < pref->size_remaining ())
    /* Use the base object if it's smaller than the member.  */
    *pref = base_ref;

  return true;
}
1951
2a837de2
MS
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  */

static bool
handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
		ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
  if (VECTOR_TYPE_P (mreftype))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
	 objects; those may be synthesized from multiple assignments
	 to consecutive data members (see PR 93200 and 96963).
	 FIXME: Vectorized assignments should only be present after
	 vectorization so this hack is only necessary after it has
	 run and could be avoided in calls from prior passes (e.g.,
	 tree-ssa-strlen.cc).
	 FIXME: Deal with this more generally, e.g., by marking up
	 such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
    return false;

  /* Account for the dereference the MEM_REF performs.  */
  ++pref->deref;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1995
1334d889
MS
/* A helper of compute_objsize_r() to determine the size from SSA_NAME
   PTR.  ADDR, OSTYPE, PREF, SNLIM and QRY are as for compute_objsize_r.
   Successfully computed results are cached in QRY keyed on PTR and
   OSTYPE.  Return true on success and false on failure.
   NOTE(review): RVALS accesses below guard against a null QRY but
   the put_ref calls dereference it unconditionally — callers appear
   expected to always pass a nonnull query; confirm.  */

static bool
handle_ssa_name (tree ptr, bool addr, int ostype,
		 access_ref *pref, ssa_name_limit_t &snlim,
		 pointer_query *qry)
{
  /* Only process an SSA_NAME if the recursion limit has not yet
     been reached.  */
  if (!snlim.next ())
    return false;

  if (qry)
    {
      /* Track the deepest recursion reached for this query.  */
      if (++qry->depth > qry->max_depth)
	qry->max_depth = qry->depth;
      /* Reuse a previously cached result for PTR if there is one.  */
      if (const access_ref *cache_ref = qry->get_ref (ptr, ostype))
	{
	  /* Add the number of DEREFerences accummulated so far.  */
	  const int deref = pref->deref;
	  *pref = *cache_ref;
	  pref->deref += deref;
	  return true;
	}
    }

  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  if (is_gimple_call (stmt))
    {
      /* If STMT is a call to an allocation function get the size
	 from its argument(s).  If successful, also set *PREF->REF
	 to PTR for the caller to include in diagnostics.  */
      wide_int wr[2];
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (gimple_call_alloc_size (stmt, wr, rvals))
	{
	  pref->ref = ptr;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  /* Constrain both bounds to a valid size.  */
	  offset_int maxsize = wi::to_offset (max_object_size ());
	  if (pref->sizrng[0] > maxsize)
	    pref->sizrng[0] = maxsize;
	  if (pref->sizrng[1] > maxsize)
	    pref->sizrng[1] = maxsize;
	}
      else
	{
	  /* For functions known to return one of their pointer arguments
	     try to determine what the returned pointer points to, and on
	     success add OFFRNG which was set to the offset added by
	     the function (e.g., memchr) to the overall offset.  */
	  bool past_end;
	  offset_int offrng[2];
	  if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
						   snlim, qry))
	    {
	      if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim, qry))
		return false;

	      /* Cap OFFRNG[1] to at most the remaining size of
		 the object.  */
	      offset_int remrng[2];
	      remrng[1] = pref->size_remaining (remrng);
	      if (remrng[1] != 0 && !past_end)
		/* Decrement the size for functions that never return
		   a past-the-end pointer.  */
		remrng[1] -= 1;

	      if (remrng[1] < offrng[1])
		offrng[1] = remrng[1];
	      pref->add_offset (offrng[0], offrng[1]);
	    }
	  else
	    {
	      /* For other calls that might return arbitrary pointers
		 including into the middle of objects set the size
		 range to maximum, clear PREF->BASE0, and also set
		 PREF->REF to include in diagnostics.  */
	      pref->set_max_size_range ();
	      pref->base0 = false;
	      pref->ref = ptr;
	    }
	}
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_nop_p (stmt))
    {
      /* For a function argument try to determine the byte size
	 of the array from the current function declaratation
	 (e.g., attribute access or related).  */
      wide_int wr[2];
      bool static_array = false;
      if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	{
	  pref->parmarray = !static_array;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  pref->ref = ref;
	  qry->put_ref (ptr, *pref, ostype);
	  return true;
	}

      /* Otherwise the argument may point anywhere into an object of
	 unknown size; record PTR for diagnostics.  */
      pref->set_max_size_range ();
      pref->base0 = false;
      pref->ref = ptr;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* Pass PTR to get_ref() via PREF.  If all PHI arguments refer
	 to the same object the function will replace it with it.  */
      pref->ref = ptr;
      access_ref phi_ref = *pref;
      if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	return false;
      *pref = phi_ref;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (!is_gimple_assign (stmt))
    {
      /* Clear BASE0 since the assigned pointer might point into
	 the middle of the object, set the maximum size range and,
	 if the SSA_NAME refers to a function argumnent, set
	 PREF->REF to it.  */
      pref->base0 = false;
      pref->set_max_size_range ();
      pref->ref = ptr;
      return true;
    }

  tree_code code = gimple_assign_rhs_code (stmt);

  if (code == MAX_EXPR || code == MIN_EXPR)
    {
      /* Handle PTR = MAX/MIN (P1, P2) by computing both operands.  */
      if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
	return false;

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  tree rhs = gimple_assign_rhs1 (stmt);

  if (code == ASSERT_EXPR)
    {
      /* Look through an ASSERT_EXPR at the SSA_NAME it guards.  */
      rhs = TREE_OPERAND (rhs, 0);
      return compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry);
    }

  if (code == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
    {
      /* Compute the size of the object first.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      /* Then fold in the offset added by the POINTER_PLUS_EXPR,
	 or the maximum offset when its range is unknown.  */
      offset_int orng[2];
      tree off = gimple_assign_rhs2 (stmt);
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (get_offset_range (off, stmt, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (code == ADDR_EXPR || code == SSA_NAME)
    {
      /* Simple copy: compute the size of the RHS and cache it.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
    {
      /* When determining the qualifiers follow the pointer but
	 avoid caching the result.  As the pointer is added to
	 and/or dereferenced the computed size and offset need
	 not be meaningful for other queries involving the same
	 pointer.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      rhs = pref->ref;
    }

  /* (This could also be an assignment from a nonlocal pointer.)  Save
     PTR to mention in diagnostics but otherwise treat it as a pointer
     to an unknown object.  */
  pref->ref = rhs;
  pref->base0 = false;
  pref->set_max_size_range ();
  return true;
}
2201
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   ADDR is true for an enclosing ADDR_EXPR.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
		   access_ref *pref, ssa_name_limit_t &snlim,
		   pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* Declared objects are handled separately from expressions.  */
  if (DECL_P (ptr))
    return handle_decl (ptr, addr, pref);

  switch (TREE_CODE (ptr))
    {
    case ADDR_EXPR:
      {
	/* Taking the address undoes one level of dereferencing.  */
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
	  return false;

	--pref->deref;
	return true;
      }

    case BIT_FIELD_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* Operand 2 is the bit position of the field; convert it
	   to a byte offset.  */
	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
	pref->add_offset (off / BITS_PER_UNIT);
	return true;
      }

    case ARRAY_REF:
      return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case COMPONENT_REF:
      return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case MEM_REF:
      return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

    case TARGET_MEM_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* TODO: Handle remaining operands.  Until then, add maximum offset.  */
	pref->ref = ptr;
	pref->add_max_offset ();
	return true;
      }

    case INTEGER_CST:
      /* Pointer constants other than null smaller than param_min_pagesize
	 might be the result of erroneous null pointer addition/subtraction.
	 Unless zero is a valid address set size to zero.  For null pointers,
	 set size to the maximum for now since those may be the result of
	 jump threading.  Similarly, for values >= param_min_pagesize in
	 order to support (type *) 0x7cdeab00.  */
      if (integer_zerop (ptr)
	  || wi::to_widest (ptr) >= param_min_pagesize)
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* In address spaces where zero is a valid address the small
	     constant may be a real object address.  */
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    pref->sizrng[0] = pref->sizrng[1] = 0;
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      pref->ref = ptr;
      return true;

    case STRING_CST:
      /* The size of a string literal including its terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;

    case POINTER_PLUS_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* The below only makes sense if the offset is being applied to the
	   address of the object.  */
	if (pref->deref != -1)
	  return false;

	offset_int orng[2];
	tree off = pref->eval (TREE_OPERAND (ptr, 1));
	if (get_offset_range (off, stmt, orng, qry->rvals))
	  pref->add_offset (orng[0], orng[1]);
	else
	  pref->add_max_offset ();
	return true;
      }

    case VIEW_CONVERT_EXPR:
      /* Look through the conversion at the converted operand.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);

    case SSA_NAME:
      return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);

    default:
      break;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  /* NOTE(review): the SSA_NAME case above returns before reaching this
     point, so this branch appears unreachable — confirm before relying
     on (or removing) it.  */
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
2342
2343/* A "public" wrapper around the above. Clients should use this overload
2344 instead. */
2345
2346tree
9a27acc3
MS
2347compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2348 pointer_query *ptr_qry)
2a837de2
MS
2349{
2350 pointer_query qry;
9a27acc3
MS
2351 if (ptr_qry)
2352 ptr_qry->depth = 0;
2353 else
2354 ptr_qry = &qry;
820f0940
MS
2355
2356 /* Clear and invalidate in case *PREF is being reused. */
2357 pref->offrng[0] = pref->offrng[1] = 0;
2358 pref->sizrng[0] = pref->sizrng[1] = -1;
2359
2a837de2 2360 ssa_name_limit_t snlim;
1334d889 2361 if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
2a837de2
MS
2362 return NULL_TREE;
2363
2364 offset_int maxsize = pref->size_remaining ();
2365 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2366 pref->offrng[0] = 0;
2367 return wide_int_to_tree (sizetype, maxsize);
2368}
2369
2370/* Transitional wrapper. The function should be removed once callers
2371 transition to the pointer_query API. */
2372
2373tree
9a27acc3
MS
2374compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2375 range_query *rvals /* = NULL */)
2a837de2
MS
2376{
2377 pointer_query qry;
9a27acc3
MS
2378 qry.rvals = rvals;
2379 return compute_objsize (ptr, stmt, ostype, pref, &qry);
2a837de2
MS
2380}
2381
2382/* Legacy wrapper around the above. The function should be removed
2383 once callers transition to one of the two above. */
2384
2385tree
9354a7d7 2386compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
2a837de2
MS
2387 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2388{
2389 /* Set the initial offsets to zero and size to negative to indicate
2390 none has been computed yet. */
2391 access_ref ref;
9354a7d7 2392 tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
2a837de2
MS
2393 if (!size || !ref.base0)
2394 return NULL_TREE;
2395
2396 if (pdecl)
2397 *pdecl = ref.ref;
2398
2399 if (poff)
2400 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2401
2402 return size;
2403}
1ff4dbdd
MS
2404
/* Determine the offset *FLDOFF of the first byte of a struct member
   of TYPE (possibly recursively) into which the byte offset OFF points,
   starting after the field START_AFTER if it's non-null.  On success,
   if nonnull, set *FLDOFF to the offset of the first byte, and return
   the field decl.  If nonnull, set *NEXTOFF to the offset of the next
   field (which reflects any padding between the returned field and
   the next).  Otherwise, if no such member can be found, return null.  */

tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Point the output parameters at local buffers when the caller
     didn't supply them so the code below can store through them
     unconditionally.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip artificial members such as base-class subobjects.  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.
	 Reuse the offset cached on the previous iteration when set.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      tree typesize = TYPE_SIZE_UNIT (fldtype);
      if (typesize && TREE_CODE (typesize) != INTEGER_CST)
	/* Bail if FLD is a variable length member.  */
	return NULL_TREE;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success.  */
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  tree pos = byte_position (next_fld);
	  if (!tree_fits_shwi_p (pos))
	    /* Bail if NEXT_FLD is a variable length member.  */
	    return NULL_TREE;
	  next_pos = tree_to_shwi (pos);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the next
	 next field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  /* If the loop ran off the end of the field chain after caching
     a next-field offset, report that offset.  */
  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
2529
/* Determine the offset *ELTOFF of the first byte of the array element
   of array ARTYPE into which the byte offset OFF points.  On success
   set *ELTOFF to the offset of the first byte and return type.
   If nonnull, also set *SUBAR_SIZE to the size in bytes of the
   innermost subarray/element type.  Otherwise, if no such element
   can be found, return null.  */

tree
array_elt_at_offset (tree artype, HOST_WIDE_INT off,
		     HOST_WIDE_INT *eltoff /* = nullptr */,
		     HOST_WIDE_INT *subar_size /* = nullptr */)
{
  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);

  /* Point optional outputs at a dummy so stores below are
     unconditional.  */
  HOST_WIDE_INT dummy;
  if (!eltoff)
    eltoff = &dummy;
  if (!subar_size)
    subar_size = &dummy;

  /* Peel off any nested array types to get at the innermost one.  */
  tree eltype = artype;
  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
    eltype = TREE_TYPE (eltype);

  tree subartype = eltype;
  /* For aggregates and non-byte-mode elements drop to the element
     type itself.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
    eltype = TREE_TYPE (eltype);

  *subar_size = int_size_in_bytes (subartype);

  if (eltype == artype)
    {
      /* ARTYPE is a one-dimensional byte array; OFF points at its
	 first (and only relevant) element.  */
      *eltoff = 0;
      return artype;
    }

  HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
  HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);

  if (off < artype_size)
    {
      /* Round OFF down to the offset of the element it points into.  */
      *eltoff = (off / eltype_size) * eltype_size;
      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
    }

  return NULL_TREE;
}
ea9e0d6c
MS
2576
/* Wrapper around build_array_type_nelts that makes sure the array
   can be created at all and handles zero sized arrays specially.  */

tree
build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
{
  /* When the element size is constant, nonzero, and not a multiple
     of the element alignment, use the main variant of the element
     type for the array.  NOTE(review): presumably this avoids
     creating an array of over-aligned elements whose size isn't
     a multiple of their alignment — confirm.  */
  if (TYPE_SIZE_UNIT (eltype)
      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
      && TYPE_ALIGN_UNIT (eltype) > 1
      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
    eltype = TYPE_MAIN_VARIANT (eltype);

  /* Consider excessive NELTS an array of unknown bound.  */
  tree idxtype = NULL_TREE;
  if (nelts < HOST_WIDE_INT_MAX)
    {
      if (nelts)
	return build_array_type_nelts (eltype, nelts);
      /* For a zero-length array build a [0] index type below.  */
      idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
    }

  /* Build the (zero-length or unknown-bound) array type and force
     its size to zero on a distinct copy of the type.  */
  tree arrtype = build_array_type (eltype, idxtype);
  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
  TYPE_SIZE (arrtype) = bitsize_zero_node;
  TYPE_SIZE_UNIT (arrtype) = size_zero_node;
  return arrtype;
}