]>
Commit | Line | Data |
---|---|---|
2a837de2 MS |
1 | /* Definitions of the pointer_query and related classes. |
2 | ||
3 | Copyright (C) 2020-2021 Free Software Foundation, Inc. | |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "backend.h" | |
2a837de2 | 25 | #include "tree.h" |
2a837de2 | 26 | #include "gimple.h" |
2a837de2 MS |
27 | #include "stringpool.h" |
28 | #include "tree-vrp.h" | |
2a837de2 | 29 | #include "diagnostic-core.h" |
2a837de2 | 30 | #include "fold-const.h" |
2a837de2 MS |
31 | #include "tree-object-size.h" |
32 | #include "tree-ssa-strlen.h" | |
2a837de2 | 33 | #include "langhooks.h" |
2a837de2 MS |
34 | #include "stringpool.h" |
35 | #include "attribs.h" | |
2a837de2 | 36 | #include "gimple-fold.h" |
ece28da9 | 37 | #include "gimple-ssa.h" |
2a837de2 | 38 | #include "intl.h" |
2a837de2 MS |
39 | #include "attr-fnspec.h" |
40 | #include "gimple-range.h" | |
2a837de2 | 41 | #include "pointer-query.h" |
ece28da9 MS |
42 | #include "tree-pretty-print.h" |
43 | #include "tree-ssanames.h" | |
54fa5567 | 44 | #include "target.h" |
2a837de2 | 45 | |
9a27acc3 MS |
46 | static bool compute_objsize_r (tree, gimple *, int, access_ref *, |
47 | ssa_name_limit_t &, pointer_query *); | |
2a837de2 MS |
48 | |
49 | /* Wrapper around the wide_int overload of get_range that accepts | |
50 | offset_int instead. For middle end expressions returns the same | |
51 | result. For a subset of nonconstamt expressions emitted by the front | |
52 | end determines a more precise range than would be possible otherwise. */ | |
53 | ||
54 | static bool | |
55 | get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals) | |
56 | { | |
57 | offset_int add = 0; | |
58 | if (TREE_CODE (x) == PLUS_EXPR) | |
59 | { | |
60 | /* Handle constant offsets in pointer addition expressions seen | |
61 | n the front end IL. */ | |
62 | tree op = TREE_OPERAND (x, 1); | |
63 | if (TREE_CODE (op) == INTEGER_CST) | |
64 | { | |
65 | op = fold_convert (signed_type_for (TREE_TYPE (op)), op); | |
66 | add = wi::to_offset (op); | |
67 | x = TREE_OPERAND (x, 0); | |
68 | } | |
69 | } | |
70 | ||
71 | if (TREE_CODE (x) == NOP_EXPR) | |
72 | /* Also handle conversions to sizetype seen in the front end IL. */ | |
73 | x = TREE_OPERAND (x, 0); | |
74 | ||
75 | tree type = TREE_TYPE (x); | |
76 | if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) | |
77 | return false; | |
78 | ||
79 | if (TREE_CODE (x) != INTEGER_CST | |
80 | && TREE_CODE (x) != SSA_NAME) | |
81 | { | |
82 | if (TYPE_UNSIGNED (type) | |
83 | && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype)) | |
84 | type = signed_type_for (type); | |
85 | ||
86 | r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add; | |
87 | r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add; | |
88 | return x; | |
89 | } | |
90 | ||
91 | wide_int wr[2]; | |
92 | if (!get_range (x, stmt, wr, rvals)) | |
93 | return false; | |
94 | ||
95 | signop sgn = SIGNED; | |
96 | /* Only convert signed integers or unsigned sizetype to a signed | |
97 | offset and avoid converting large positive values in narrower | |
98 | types to negative offsets. */ | |
99 | if (TYPE_UNSIGNED (type) | |
100 | && wr[0].get_precision () < TYPE_PRECISION (sizetype)) | |
101 | sgn = UNSIGNED; | |
102 | ||
103 | r[0] = offset_int::from (wr[0], sgn); | |
104 | r[1] = offset_int::from (wr[1], sgn); | |
105 | return true; | |
106 | } | |
107 | ||
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
			  ssa_name_limit_t &snlim, pointer_query *qry)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
	/* Functions return the first argument (not a range).  */
	offrng[0] = offrng[1] = 0;
	return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
	  || !DECL_IS_OPERATOR_NEW_P (fn)
	  || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
	return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
	 a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")       // ordinary form
	  && !id_equal (fname, "_ZnwmPv")    // ordinary form
	  && !id_equal (fname, "_ZnajPv")    // array form
	  && !id_equal (fname, "_ZnamPv"))   // array form
	return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
	return NULL_TREE;

      /* Allocation functions return a pointer to the beginning.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
	/* The returned pointer is in a range constrained by the smaller
	   of the upper bound of the size argument and the source object
	   size.  */
	offrng[0] = 0;
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
	if (!off_valid || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize (src, stmt, 1, &aref, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* Mempcpy may return a past-the-end pointer.  */
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
	/* The result points at most just before the end of the searched
	   region, hence the -1 adjustment of the upper bound.  */
	tree off = gimple_call_arg (stmt, 2);
	if (get_offset_range (off, stmt, offrng, qry->rvals))
	  offrng[1] -= 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      /* The result may point anywhere into the argument (the string
	 length isn't known here), so use the maximal offset range.  */
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
	/* The result points at most at the terminating nul, i.e., at
	   most SIZE - 1 bytes past the destination.  */
	access_ref aref;
	tree src = gimple_call_arg (stmt, 1);
	if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry))
	  offrng[1] = aref.sizrng[1] - 1;
	else
	  offrng[1] = HOST_WIDE_INT_M1U;

	offrng[0] = 0;
	return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
	/* The returned pointer is in a range between the first argument
	   and it plus the smaller of the upper bound of the size argument
	   and the source object size.  */
	offrng[1] = HOST_WIDE_INT_M1U;
	tree off = gimple_call_arg (stmt, 2);
	if (!get_offset_range (off, stmt, offrng, qry->rvals)
	    || offrng[0] != offrng[1])
	  {
	    /* If the offset is either indeterminate or in some range,
	       try to constrain its upper bound to at most the size
	       of the source object.  */
	    access_ref aref;
	    tree src = gimple_call_arg (stmt, 1);
	    if (compute_objsize_r (src, stmt, 1, &aref, snlim, qry)
		&& aref.sizrng[1] < offrng[1])
	      offrng[1] = aref.sizrng[1];
	  }

	/* When the source is the empty string the returned pointer is
	   a copy of the argument.  Otherwise stpcpy can also return
	   a past-the-end pointer.  */
	offrng[0] = 0;
	*past_end = true;
	return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
280 | ||
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
		int flags /* = 0 */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  /* Fall back on the global range query when the caller didn't
     provide one.  */
  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      value_range vr;

      query->range_of_expr (vr, exp, stmt);

      if (vr.undefined_p ())
	vr.set_varying (TREE_TYPE (exp));
      range_type = vr.kind ();
      min = wi::to_wide (vr.min ());
      max = wi::to_wide (vr.max ());
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else
	{
	  wide_int maxsize = wi::to_wide (max_object_size ());
	  min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
	  max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
	  if (wi::eq_p (0, min - 1))
	    {
	      /* EXP is unsigned and not in the range [1, MAX].  That means
		 it's either zero or greater than MAX.  Even though 0 would
		 normally be detected by -Walloc-zero, unless ALLOW_ZERO
		 is set, set the range to [MAX, TYPE_MAX] so that when MAX
		 is greater than the limit the whole range is diagnosed.  */
	      wide_int maxsize = wi::to_wide (max_object_size ());
	      if (flags & SR_ALLOW_ZERO)
		{
		  if (wi::leu_p (maxsize, max + 1)
		      || !(flags & SR_USE_LARGEST))
		    min = max = wi::zero (expprec);
		  else
		    {
		      min = max + 1;
		      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		    }
		}
	      else
		{
		  min = max + 1;
		  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
		}
	    }
	  else if ((flags & SR_USE_LARGEST)
		   && wi::ltu_p (max + 1, maxsize))
	    {
	      /* When USE_LARGEST is set and the larger of the two subranges
		 is a valid size, use it...  */
	      min = max + 1;
	      max = maxsize;
	    }
	  else
	    {
	      /* ...otherwise use the smaller subrange.  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
432 | ||
/* Convenience overload of the above that uses the global range query
   and has no statement context.  */

bool
get_size_range (tree exp, tree range[2], int flags /* = 0 */)
{
  return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
}
438 | ||
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
			range_query *qry /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      /* Without attribute alloc_size only __builtin_alloca_with_align
	 is recognized; its size is the first argument.  */
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      /* Decode the (1-based) argument positions recorded in
	 attribute alloc_size into zero-based indices.  */
      tree atval = TREE_VALUE (at);
      if (!atval)
	return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
	return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
	{
	  argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
	  if (nargs <= argidx2)
	    return NULL_TREE;
	}
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  /* With a single constant size argument there is nothing to multiply;
     ARGIDX2 > NARGS means no second alloc_size argument was given.  */
  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
      return size_max;
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
542 | ||
/* For an access to an object referenced to by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current
   function.  Set STATIC_ARRAY if the array parameter has been declared
   [static].  Return the function parameter on success and null
   otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
			bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
	 those of subsequent assertions or based on known calls to
	 the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     ever added).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
598 | ||
/* Given a statement STMT, set the bounds of the reference to at most
   as many bytes as BOUND or unknown when null, and at least one when
   the MINACCESS is true unless BOUND is a constant zero.  STMT is
   used for context to get accurate range info.  */

access_ref::access_ref (range_query *qry /* = nullptr */,
			tree bound /* = NULL_TREE */,
			gimple *stmt /* = nullptr */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (qry, bound, stmt, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      /* Clamp the lower bound: the minimum access is one byte only
	 when MINACCESS is set and the bound is known nonzero,
	 otherwise zero.  */
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
632 | ||
633 | /* Return the PHI node REF refers to or null if it doesn't. */ | |
634 | ||
635 | gphi * | |
636 | access_ref::phi () const | |
637 | { | |
638 | if (!ref || TREE_CODE (ref) != SSA_NAME) | |
639 | return NULL; | |
640 | ||
641 | gimple *def_stmt = SSA_NAME_DEF_STMT (ref); | |
ece28da9 | 642 | if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI) |
2a837de2 MS |
643 | return NULL; |
644 | ||
645 | return as_a <gphi *> (def_stmt); | |
646 | } | |
647 | ||
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
   argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  access_ref phi_ref;
  if (pref)
    {
      /* The identity of the object has not been determined yet but
	 PREF->REF is set by the caller to the PHI for convenience.
	 The size is negative/invalid and the offset is zero (it's
	 updated only after the identity of the object has been
	 established).  */
      gcc_assert (pref->sizrng[0] < 0);
      gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);

      phi_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, phi_stmt, ostype, &phi_arg_ref, *psnlim,
			      qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Cache the result for the SSA argument to avoid recomputing it
	 on subsequent queries.  */
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      parmarray |= phi_arg_ref.parmarray;

      /* A null pointer argument is ignored unless it's the PHI's only
	 argument.  */
      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  /* If PHI_REF doesn't contain a meaningful result yet set it
	     to the result for the first argument.  */
	  if (!nullp)
	    phi_ref = phi_arg_ref;

	  /* Set if the current argument refers to one or more objects of
	     known size (or range of sizes), as opposed to referring to
	     one or more unknown object(s).  */
	  const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				       || phi_arg_ref.sizrng[1] != maxobjsize);
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];

	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      /* Reset the PHI's BASE0 flag if any of the nonnull arguments
	 refers to an object at an unknown offset.  */
      if (!phi_arg_ref.base0)
	phi_ref.base0 = false;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;
    }

  /* Replace the lower bound of the largest argument with the size
     of the smallest argument, and set PARMARRAY if any argument
     was one.  */
  phi_ref.sizrng[0] = minsize;
  phi_ref.parmarray = parmarray;

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
796 | ||
/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  if (sizrng[0] < 0)
    {
      /* If the identity of the object hasn't been determined return
	 the maximum size range.  */
      *pmin = 0;
      return wi::to_offset (max_object_size ());
    }

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into referenced object is zero-based (i.e., it's
	 not referenced by a pointer into middle of some unknown object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first.  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
861 | ||
862 | /* Return true if the offset and object size are in range for SIZE. */ | |
863 | ||
864 | bool | |
865 | access_ref::offset_in_range (const offset_int &size) const | |
866 | { | |
867 | if (size_remaining () < size) | |
868 | return false; | |
869 | ||
870 | if (base0) | |
871 | return offmax[0] >= 0 && offmax[1] <= sizrng[1]; | |
872 | ||
873 | offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)); | |
874 | return offmax[0] > -maxoff && offmax[1] < maxoff; | |
875 | } | |
876 | ||
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  if (offmax[0] > 0)
	    offmax[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  /* Update the record of the minimum and maximum offsets seen so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
957 | ||
/* Issue one inform message describing each target of an access REF.
   MODE distinguishes between read, write, and read-write accesses and
   selects the wording of the notes.  */

void
access_ref::inform_access (access_mode mode) const
{
  const access_ref &aref = *this;
  if (!aref.ref)
    return;

  if (aref.phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
	 with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref))
	return;

      /* Except for MAXREF, the rest of the arguments' offsets need not
	 reflect one added to the PHI itself.  Determine the latter from
	 MAXREF on which the result is based.  */
      const offset_int orng[] =
	{
	  offrng[0] - maxref.offrng[0],
	  wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
	};

      /* Add the final PHI's offset to that of each of the arguments
	 and recurse to issue an inform message for it.  */
      for (unsigned i = 0; i != all_refs.length (); ++i)
	{
	  /* Skip any PHIs; those could lead to infinite recursion.  */
	  if (all_refs[i].phi ())
	    continue;

	  all_refs[i].add_offset (orng[0], orng[1]);
	  all_refs[i].inform_access (mode);
	}
      return;
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    /* Saturate an unrepresentable lower bound at the ptrdiff_t limit
       of the matching sign.  */
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  /* Format the offset only when it's nonzero and not obviously covered
     by the size; a zero range isn't meaningful to a user.  */
  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (!stmt)
	return;

      if (is_gimple_call (stmt))
	{
	  loc = gimple_location (stmt);
	  if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	    {
	      /* Strip the SSA_NAME suffix from the variable name and
		 recreate an identifier with the VLA's original name.  */
	      ref = gimple_call_lhs (stmt);
	      if (SSA_NAME_IDENTIFIER (ref))
		{
		  ref = SSA_NAME_IDENTIFIER (ref);
		  const char *id = IDENTIFIER_POINTER (ref);
		  size_t len = strcspn (id, ".$");
		  if (!len)
		    len = strlen (id);
		  ref = get_identifier_with_length (id, len);
		}
	    }
	  else
	    {
	      /* Except for VLAs, retrieve the allocation function.  */
	      allocfn = gimple_call_fndecl (stmt);
	      if (!allocfn)
		allocfn = gimple_call_fn (stmt);
	      if (TREE_CODE (allocfn) == SSA_NAME)
		{
		  /* For an ALLOC_CALL via a function pointer make a small
		     effort to determine the destination of the pointer.  */
		  gimple *def = SSA_NAME_DEF_STMT (allocfn);
		  if (gimple_assign_single_p (def))
		    {
		      tree rhs = gimple_assign_rhs1 (def);
		      if (DECL_P (rhs))
			allocfn = rhs;
		      else if (TREE_CODE (rhs) == COMPONENT_REF)
			allocfn = TREE_OPERAND (rhs, 1);
		    }
		}
	    }
	}
      else if (gimple_nop_p (stmt))
	/* Handle DECL_PARM below.  */
	ref = SSA_NAME_VAR (ref);
      else if (is_gimple_assign (stmt)
	       && (gimple_assign_rhs_code (stmt) == MIN_EXPR
		   || gimple_assign_rhs_code (stmt) == MAX_EXPR))
	{
	  /* MIN or MAX_EXPR here implies a reference to a known object
	     and either an unknown or distinct one (the latter being
	     the result of an invalid relational expression).  Determine
	     the identity of the former and point to it in the note.
	     TODO: Consider merging with PHI handling.  */
	  access_ref arg_ref[2];
	  tree arg = gimple_assign_rhs1 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
	  arg = gimple_assign_rhs2 (stmt);
	  compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);

	  /* Use the argument that references a known object with more
	     space remaining.  IDX is used as an array index below, so
	     false selects the first argument and true the second.  */
	  const bool idx
	    = (!arg_ref[0].ref || !arg_ref[0].base0
	       || (arg_ref[0].base0 && arg_ref[1].base0
		   && (arg_ref[0].size_remaining ()
		       < arg_ref[1].size_remaining ())));

	  arg_ref[idx].offrng[0] = offrng[0];
	  arg_ref[idx].offrng[1] = offrng[1];
	  arg_ref[idx].inform_access (mode);
	  return;
	}
    }

  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
	   && TREE_CODE (ref) != SSA_NAME)
    return;

  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into destination object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "destination object %qE of size %s", ref, sizestr);
	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into destination object of size %s "
		"allocated by %qE", offstr, sizestr, allocfn);
      else
	inform (loc, "destination object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  if (mode == access_read_only)
    {
      if (allocfn == NULL_TREE)
	{
	  if (*offstr)
	    inform (loc, "at offset %s into source object %qE of size %s",
		    offstr, ref, sizestr);
	  else
	    inform (loc, "source object %qE of size %s", ref, sizestr);

	  return;
	}

      if (*offstr)
	inform (loc,
		"at offset %s into source object of size %s allocated by %qE",
		offstr, sizestr, allocfn);
      else
	inform (loc, "source object of size %s allocated by %qE",
		sizestr, allocfn);
      return;
    }

  /* Fallback wording for any other access mode.  */
  if (allocfn == NULL_TREE)
    {
      if (*offstr)
	inform (loc, "at offset %s into object %qE of size %s",
		offstr, ref, sizestr);
      else
	inform (loc, "object %qE of size %s", ref, sizestr);

      return;
    }

  if (*offstr)
    inform (loc,
	    "at offset %s into object of size %s allocated by %qE",
	    offstr, sizestr, allocfn);
  else
    inform (loc, "object of size %s allocated by %qE",
	    sizestr, allocfn);
}
1201 | ||
/* Set a bit for the PHI in VISITED and return true if it wasn't
   already set.  */

bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  /* Allocate the bitmap lazily on first use.  */
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* bitmap_set_bit returns true only when the bit changes, i.e.,
     false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1214 | ||
/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Clear the PHI's bit on the way back out so it can be revisited
     along other use-def paths.  Assumes visit_phi has been called
     for SSA_NAME first and so VISITED is non-null.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1223 | ||
1224 | /* Return false if the SSA_NAME chain length counter has reached | |
1225 | the limit, otherwise increment the counter and return true. */ | |
1226 | ||
1227 | bool | |
1228 | ssa_name_limit_t::next () | |
1229 | { | |
1230 | /* Return a negative value to let caller avoid recursing beyond | |
1231 | the specified limit. */ | |
1232 | if (ssa_def_max == 0) | |
1233 | return false; | |
1234 | ||
1235 | --ssa_def_max; | |
1236 | return true; | |
1237 | } | |
1238 | ||
1239 | /* If the SSA_NAME has already been "seen" return a positive value. | |
1240 | Otherwise add it to VISITED. If the SSA_NAME limit has been | |
1241 | reached, return a negative value. Otherwise return zero. */ | |
1242 | ||
1243 | int | |
1244 | ssa_name_limit_t::next_phi (tree ssa_name) | |
1245 | { | |
1246 | { | |
1247 | gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name); | |
1248 | /* Return a positive value if the PHI has already been visited. */ | |
1249 | if (gimple_code (def_stmt) == GIMPLE_PHI | |
1250 | && !visit_phi (ssa_name)) | |
1251 | return 1; | |
1252 | } | |
1253 | ||
1254 | /* Return a negative value to let caller avoid recursing beyond | |
1255 | the specified limit. */ | |
1256 | if (ssa_def_max == 0) | |
1257 | return -1; | |
1258 | ||
1259 | --ssa_def_max; | |
1260 | ||
1261 | return 0; | |
1262 | } | |
1263 | ||
/* Free the bitmap of visited PHIs, if one was allocated.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
1269 | ||
/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  The statistics counters
   (HITS, MISSES, FAILURES, DEPTH, MAX_DEPTH) are value-initialized
   to zero.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  */
}
1280 | ||
1281 | /* Return a pointer to the cached access_ref instance for the SSA_NAME | |
1282 | PTR if it's there or null otherwise. */ | |
1283 | ||
1284 | const access_ref * | |
1285 | pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const | |
1286 | { | |
1287 | if (!var_cache) | |
1288 | { | |
1289 | ++misses; | |
1290 | return NULL; | |
1291 | } | |
1292 | ||
1293 | unsigned version = SSA_NAME_VERSION (ptr); | |
1294 | unsigned idx = version << 1 | (ostype & 1); | |
1295 | if (var_cache->indices.length () <= idx) | |
1296 | { | |
1297 | ++misses; | |
1298 | return NULL; | |
1299 | } | |
1300 | ||
1301 | unsigned cache_idx = var_cache->indices[idx]; | |
1302 | if (var_cache->access_refs.length () <= cache_idx) | |
1303 | { | |
1304 | ++misses; | |
1305 | return NULL; | |
1306 | } | |
1307 | ||
1308 | access_ref &cache_ref = var_cache->access_refs[cache_idx]; | |
1309 | if (cache_ref.ref) | |
1310 | { | |
1311 | ++hits; | |
1312 | return &cache_ref; | |
1313 | } | |
1314 | ||
1315 | ++misses; | |
1316 | return NULL; | |
1317 | } | |
1318 | ||
1319 | /* Retrieve the access_ref instance for a variable from the cache if it's | |
1320 | there or compute it and insert it into the cache if it's nonnonull. */ | |
1321 | ||
1322 | bool | |
9a27acc3 | 1323 | pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref, int ostype /* = 1 */) |
2a837de2 MS |
1324 | { |
1325 | const unsigned version | |
1326 | = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0; | |
1327 | ||
1328 | if (var_cache && version) | |
1329 | { | |
1330 | unsigned idx = version << 1 | (ostype & 1); | |
1331 | if (idx < var_cache->indices.length ()) | |
1332 | { | |
1333 | unsigned cache_idx = var_cache->indices[idx] - 1; | |
1334 | if (cache_idx < var_cache->access_refs.length () | |
1335 | && var_cache->access_refs[cache_idx].ref) | |
1336 | { | |
1337 | ++hits; | |
1338 | *pref = var_cache->access_refs[cache_idx]; | |
1339 | return true; | |
1340 | } | |
1341 | } | |
1342 | ||
1343 | ++misses; | |
1344 | } | |
1345 | ||
9a27acc3 | 1346 | if (!compute_objsize (ptr, stmt, ostype, pref, this)) |
2a837de2 MS |
1347 | { |
1348 | ++failures; | |
1349 | return false; | |
1350 | } | |
1351 | ||
1352 | return true; | |
1353 | } | |
1354 | ||
/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   nonnull.  PTR is assumed to be an SSA_NAME (SSA_NAME_VERSION is
   applied to it unconditionally).  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero;
     the code below uses the stored value directly (not minus one) as
     the index into ACCESS_REFS, so slot zero of ACCESS_REFS acts as
     an always-invalid sentinel.
     NOTE(review): the stmt overload of get_ref () reads ACCESS_REFS
     at INDICES[IDX] - 1 instead — one of the two is off by one;
     verify which scheme is intended.  */
  if (var_cache->indices.length () <= idx)
    var_cache->indices.safe_grow_cleared (idx + 1);

  if (!var_cache->indices[idx])
    var_cache->indices[idx] = var_cache->access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  Once nonnull, the REF value must stay
     unchanged.  */
  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    var_cache->access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache->access_refs[cache_idx];
  if (cache_ref.ref)
    {
      /* The entry for this SSA_NAME/OSTYPE pair has already been
	 populated; it must refer to the same object.  */
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
1394 | ||
1395 | /* Flush the cache if it's nonnull. */ | |
1396 | ||
1397 | void | |
1398 | pointer_query::flush_cache () | |
1399 | { | |
1400 | if (!var_cache) | |
1401 | return; | |
1402 | var_cache->indices.release (); | |
1403 | var_cache->access_refs.release (); | |
1404 | } | |
1405 | ||
ece28da9 MS |
1406 | /* Dump statistics and, optionally, cache contents to DUMP_FILE. */ |
1407 | ||
1408 | void | |
1409 | pointer_query::dump (FILE *dump_file, bool contents /* = false */) | |
1410 | { | |
1411 | unsigned nused = 0, nrefs = 0; | |
1412 | unsigned nidxs = var_cache->indices.length (); | |
1413 | for (unsigned i = 0; i != nidxs; ++i) | |
1414 | { | |
1415 | unsigned ari = var_cache->indices[i]; | |
1416 | if (!ari) | |
1417 | continue; | |
1418 | ||
1419 | ++nused; | |
1420 | ||
1421 | const access_ref &aref = var_cache->access_refs[ari]; | |
1422 | if (!aref.ref) | |
1423 | continue; | |
1424 | ||
1425 | ++nrefs; | |
1426 | } | |
1427 | ||
1428 | fprintf (dump_file, "pointer_query counters:\n" | |
1429 | " index cache size: %u\n" | |
1430 | " index entries: %u\n" | |
1431 | " access cache size: %u\n" | |
1432 | " access entries: %u\n" | |
1433 | " hits: %u\n" | |
1434 | " misses: %u\n" | |
1435 | " failures: %u\n" | |
1436 | " max_depth: %u\n", | |
1437 | nidxs, nused, | |
1438 | var_cache->access_refs.length (), nrefs, | |
1439 | hits, misses, failures, max_depth); | |
1440 | ||
1441 | if (!contents || !nidxs) | |
1442 | return; | |
1443 | ||
1444 | fputs ("\npointer_query cache contents:\n", dump_file); | |
1445 | ||
1446 | for (unsigned i = 0; i != nidxs; ++i) | |
1447 | { | |
1448 | unsigned ari = var_cache->indices[i]; | |
1449 | if (!ari) | |
1450 | continue; | |
1451 | ||
1452 | const access_ref &aref = var_cache->access_refs[ari]; | |
1453 | if (!aref.ref) | |
1454 | continue; | |
1455 | ||
1456 | /* The level-1 cache index corresponds to the SSA_NAME_VERSION | |
1457 | shifted left by one and ORed with the Object Size Type in | |
1458 | the lowest bit. Print the two separately. */ | |
1459 | unsigned ver = i >> 1; | |
1460 | unsigned ost = i & 1; | |
1461 | ||
1462 | fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari); | |
1463 | if (tree name = ssa_name (ver)) | |
1464 | { | |
1465 | print_generic_expr (dump_file, name); | |
1466 | fputs (" = ", dump_file); | |
1467 | } | |
1468 | else | |
1469 | fprintf (dump_file, " _%u = ", ver); | |
1470 | ||
1471 | if (gphi *phi = aref.phi ()) | |
1472 | { | |
1473 | fputs ("PHI <", dump_file); | |
1474 | unsigned nargs = gimple_phi_num_args (phi); | |
1475 | for (unsigned i = 0; i != nargs; ++i) | |
1476 | { | |
1477 | tree arg = gimple_phi_arg_def (phi, i); | |
1478 | print_generic_expr (dump_file, arg); | |
1479 | if (i + 1 < nargs) | |
1480 | fputs (", ", dump_file); | |
1481 | } | |
1482 | fputc ('>', dump_file); | |
1483 | } | |
1484 | else | |
1485 | print_generic_expr (dump_file, aref.ref); | |
1486 | ||
1487 | if (aref.offrng[0] != aref.offrng[1]) | |
1488 | fprintf (dump_file, " + [%lli, %lli]", | |
1489 | (long long) aref.offrng[0].to_shwi (), | |
1490 | (long long) aref.offrng[1].to_shwi ()); | |
1491 | else if (aref.offrng[0] != 0) | |
1492 | fprintf (dump_file, " %c %lli", | |
1493 | aref.offrng[0] < 0 ? '-' : '+', | |
1494 | (long long) aref.offrng[0].to_shwi ()); | |
1495 | ||
1496 | fputc ('\n', dump_file); | |
1497 | } | |
1498 | ||
1499 | fputc ('\n', dump_file); | |
1500 | } | |
1501 | ||
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   set PREF->REF to the operand with more or less space remaining,
   respectively, if both refer to the same (sub)object, or to PTR if they
   might not, and return true.  Otherwise, if the identity of neither
   operand can be determined, return false.  */

static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, stmt, ostype, &aref[0], snlim, qry))
    {
      /* The first operand's object is unknown: assume the maximum
	 size and offset range.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, stmt, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  /* I0 is used as an array index below: false selects the first
     operand's access_ref and true the second's.  */
  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining, whichever
	     is appropriate for CODE.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];

	  if (aref[i0].ref != aref[i1].ref)
	    /* If the operands don't refer to the same (sub)object set
	       PREF->REF to the SSA_NAME from which STMT was obtained
	       so that both can be identified in a diagnostic.  */
	    pref->ref = ptr;

	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PREF->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
1594 | ||
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if the reference is the operand of an ADDR_EXPR.
   Return true on success and false on failure.  */

static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
		  access_ref *pref, ssa_name_limit_t &snlim,
		  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  /* Count the dereference implied by the subscript.  */
  ++pref->deref;

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the object the array belongs to first.  */
  if (!compute_objsize_r (arefop, stmt, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte
     offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain
	 (normally zero but 1 in Fortran).  */
      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* The element size isn't a usable constant: give up on the
	 offset but keep the object determined above.  */
      pref->add_max_offset ();
      return true;
    }

  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten the size bounds; never widen them.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1675 | ||
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  */

static bool
handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
		ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  /* Count the dereference implied by the MEM_REF.  */
  ++pref->deref;

  if (VECTOR_TYPE_P (TREE_TYPE (mref)))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
	 objects; those may be synthesized from multiple assignments
	 to consecutive data members (see PR 93200 and 96963).
	 FIXME: Vectorized assignments should only be present after
	 vectorization so this hack is only necessary after it has
	 run and could be avoided in calls from prior passes (e.g.,
	 tree-ssa-strlen.c).
	 FIXME: Deal with this more generally, e.g., by marking up
	 such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  /* Determine the object the MEM_REF's operand points to first.  */
  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, stmt, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, NULL, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
1718 | ||
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   STMT is the statement the pointer is used in, when known.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, gimple *stmt, int ostype, access_ref *pref,
		   ssa_name_limit_t &snlim, pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* An ADDR_EXPR undoes one level of dereference.  */
  const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
  if (addr)
    {
      --pref->deref;
      ptr = TREE_OPERAND (ptr, 0);
    }

  if (DECL_P (ptr))
    {
      pref->ref = ptr;

      /* Reset the offset in case it was set by a prior call and not
	 cleared by the caller.  The offset is only adjusted after
	 the identity of the object has been determined.  */
      pref->offrng[0] = pref->offrng[1] = 0;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Set the maximum size if the reference is to the pointer
	     itself (as opposed to what it points to), and clear
	     BASE0 since the offset isn't necessarily zero-based.  */
	  pref->set_max_size_range ();
	  pref->base0 = false;
	  return true;
	}

      /* Valid offsets into the object are nonnegative.  */
      pref->base0 = true;

      /* Use the initializer size for the DECL when it's a constant;
	 otherwise fall back to the maximum size range.  */
      if (tree size = decl_init_size (ptr, false))
	if (TREE_CODE (size) == INTEGER_CST)
	  {
	    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
	    return true;
	  }

      pref->set_max_size_range ();
      return true;
    }

  const tree_code code = TREE_CODE (ptr);
  range_query *const rvals = qry ? qry->rvals : NULL;

  if (code == BIT_FIELD_REF)
    {
      /* Size of the enclosing object plus the byte offset of the
	 bit-field (operand 2 is in bits).  */
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
	return false;

      offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
      pref->add_offset (off / BITS_PER_UNIT);
      return true;
    }

  if (code == COMPONENT_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
	/* In accesses through union types consider the entire unions
	   rather than just their members.  */
	ostype = 0;
      tree field = TREE_OPERAND (ptr, 1);

      if (ostype == 0)
	{
	  /* In OSTYPE zero (for raw memory functions like memcpy), use
	     the maximum size instead if the identity of the enclosing
	     object cannot be determined.  */
	  if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
	    return false;

	  /* Otherwise, use the size of the enclosing object and add
	     the offset of the member to the offset computed so far.  */
	  tree offset = byte_position (field);
	  if (TREE_CODE (offset) == INTEGER_CST)
	    pref->add_offset (wi::to_offset (offset));
	  else
	    pref->add_max_offset ();

	  if (!pref->ref)
	    /* REF may have been already set to an SSA_NAME earlier
	       to provide better context for diagnostics.  In that case,
	       leave it unchanged.  */
	    pref->ref = ref;
	  return true;
	}

      pref->ref = field;

      if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
	{
	  /* Set maximum size if the reference is to the pointer member
	     itself (as opposed to what it points to).  */
	  pref->set_max_size_range ();
	  return true;
	}

      /* SAM is set for array members that might need special treatment.  */
      special_array_member sam;
      tree size = component_ref_size (ptr, &sam);
      if (sam == special_array_member::int_0)
	/* Interior zero-length array: treat as size zero.  */
	pref->sizrng[0] = pref->sizrng[1] = 0;
      else if (!pref->trail1special && sam == special_array_member::trail_1)
	/* Trailing one-element array not treated specially.  */
	pref->sizrng[0] = pref->sizrng[1] = 1;
      else if (size && TREE_CODE (size) == INTEGER_CST)
	pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
      else
	{
	  /* When the size of the member is unknown it's either a flexible
	     array member or a trailing special array member (either zero
	     length or one-element).  Set the size to the maximum minus
	     the constant size of the type.  */
	  pref->sizrng[0] = 0;
	  pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
	  if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
	    if (TREE_CODE (recsize) == INTEGER_CST)
	      pref->sizrng[1] -= wi::to_offset (recsize);
	}
      return true;
    }

  if (code == ARRAY_REF)
    return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

  if (code == MEM_REF)
    return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

  if (code == TARGET_MEM_REF)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
	return false;

      /* TODO: Handle remaining operands.  Until then, add maximum offset.  */
      pref->ref = ptr;
      pref->add_max_offset ();
      return true;
    }

  if (code == INTEGER_CST)
    {
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Unless zero
	 is a valid address set size to zero.  For null pointers, set
	 size to the maximum for now since those may be the result of
	 jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Honor targets (e.g., some embedded address spaces) where
	     address zero is a valid object address.  */
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    pref->sizrng[0] = pref->sizrng[1] = 0;
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      pref->ref = ptr;

      return true;
    }

  if (code == STRING_CST)
    {
      /* The size of a string literal including its terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;
    }

  if (code == POINTER_PLUS_EXPR)
    {
      tree ref = TREE_OPERAND (ptr, 0);
      if (!compute_objsize_r (ref, stmt, ostype, pref, snlim, qry))
	return false;

      /* Clear DEREF since the offset is being applied to the target
	 of the dereference.  */
      pref->deref = 0;

      offset_int orng[2];
      tree off = pref->eval (TREE_OPERAND (ptr, 1));
      if (get_offset_range (off, NULL, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();
      return true;
    }

  if (code == VIEW_CONVERT_EXPR)
    {
      /* Look through the conversion; size is that of the operand.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, ostype, pref, snlim, qry);
    }

  if (code == SSA_NAME)
    {
      if (!snlim.next ())
	return false;

      /* Only process an SSA_NAME if the recursion limit has not yet
	 been reached.  */
      if (qry)
	{
	  /* NOTE(review): this condition is true whenever the
	     incremented depth is nonzero, so MAX_DEPTH is overwritten
	     on every visit rather than only when a new maximum is
	     reached; since DEPTH is never decremented here this may be
	     equivalent in practice, but `++qry->depth > qry->max_depth'
	     looks like the intended test — confirm.  */
	  if (++qry->depth)
	    qry->max_depth = qry->depth;
	  if (const access_ref *cache_ref = qry->get_ref (ptr))
	    {
	      /* If the pointer is in the cache set *PREF to what it refers
		 to and return success.
		 FIXME: BNDRNG is determined by each access and so it doesn't
		 belong in access_ref.  Until the design is changed, keep it
		 unchanged here.  */
	      const offset_int bndrng[2] = { pref->bndrng[0], pref->bndrng[1] };
	      *pref = *cache_ref;
	      pref->bndrng[0] = bndrng[0];
	      pref->bndrng[1] = bndrng[1];
	      return true;
	    }
	}

      /* NOTE(review): from here on QRY is dereferenced without a null
	 check (qry->put_ref below) even though the cache lookup above
	 is guarded by `if (qry)'; presumably all callers reach this
	 point with a nonnull QRY (the public wrapper supplies one) —
	 verify.  */
      stmt = SSA_NAME_DEF_STMT (ptr);
      if (is_gimple_call (stmt))
	{
	  /* If STMT is a call to an allocation function get the size
	     from its argument(s).  If successful, also set *PREF->REF
	     to PTR for the caller to include in diagnostics.  */
	  wide_int wr[2];
	  if (gimple_call_alloc_size (stmt, wr, rvals))
	    {
	      pref->ref = ptr;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      /* Constrain both bounds to a valid size.  */
	      offset_int maxsize = wi::to_offset (max_object_size ());
	      if (pref->sizrng[0] > maxsize)
		pref->sizrng[0] = maxsize;
	      if (pref->sizrng[1] > maxsize)
		pref->sizrng[1] = maxsize;
	    }
	  else
	    {
	      /* For functions known to return one of their pointer arguments
		 try to determine what the returned pointer points to, and on
		 success add OFFRNG which was set to the offset added by
		 the function (e.g., memchr) to the overall offset.  */
	      bool past_end;
	      offset_int offrng[2];
	      if (tree ret = gimple_call_return_array (stmt, offrng,
						       &past_end, snlim, qry))
		{
		  if (!compute_objsize_r (ret, stmt, ostype, pref, snlim, qry))
		    return false;

		  /* Cap OFFRNG[1] to at most the remaining size of
		     the object.  */
		  offset_int remrng[2];
		  remrng[1] = pref->size_remaining (remrng);
		  if (remrng[1] != 0 && !past_end)
		    /* Decrement the size for functions that never return
		       a past-the-end pointer.  */
		    remrng[1] -= 1;

		  if (remrng[1] < offrng[1])
		    offrng[1] = remrng[1];
		  pref->add_offset (offrng[0], offrng[1]);
		}
	      else
		{
		  /* For other calls that might return arbitrary pointers
		     including into the middle of objects set the size
		     range to maximum, clear PREF->BASE0, and also set
		     PREF->REF to include in diagnostics.  */
		  pref->set_max_size_range ();
		  pref->base0 = false;
		  pref->ref = ptr;
		}
	    }
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_nop_p (stmt))
	{
	  /* For a function argument try to determine the byte size
	     of the array from the current function declaration
	     (e.g., attribute access or related).  */
	  wide_int wr[2];
	  bool static_array = false;
	  if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	    {
	      pref->parmarray = !static_array;
	      pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	      pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	      pref->ref = ref;
	      qry->put_ref (ptr, *pref);
	      return true;
	    }

	  pref->set_max_size_range ();
	  pref->base0 = false;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Merge the sizes/offsets of all the PHI arguments; SNLIM
	     prevents revisiting the same operand.  */
	  pref->ref = ptr;
	  access_ref phi_ref = *pref;
	  if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	    return false;
	  *pref = phi_ref;
	  pref->ref = ptr;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (!is_gimple_assign (stmt))
	{
	  /* Clear BASE0 since the assigned pointer might point into
	     the middle of the object, set the maximum size range and,
	     if the SSA_NAME refers to a function argument, set
	     PREF->REF to it.  */
	  pref->base0 = false;
	  pref->set_max_size_range ();
	  pref->ref = ptr;
	  return true;
	}

      tree_code code = gimple_assign_rhs_code (stmt);

      if (code == MAX_EXPR || code == MIN_EXPR)
	{
	  /* NOTE(review): unlike the other helpers in this function,
	     handle_min_max_size isn't passed STMT — confirm its
	     signature wasn't meant to be updated along with the
	     others.  */
	  if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
	    return false;

	  qry->put_ref (ptr, *pref);
	  return true;
	}

      tree rhs = gimple_assign_rhs1 (stmt);

      if (code == ASSERT_EXPR)
	{
	  /* Look through the assertion to the asserted pointer.  */
	  rhs = TREE_OPERAND (rhs, 0);
	  return compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry);
	}

      if (code == POINTER_PLUS_EXPR
	  && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
	{
	  /* Compute the size of the object first.  */
	  if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
	    return false;

	  offset_int orng[2];
	  tree off = gimple_assign_rhs2 (stmt);
	  if (get_offset_range (off, stmt, orng, rvals))
	    pref->add_offset (orng[0], orng[1]);
	  else
	    pref->add_max_offset ();

	  qry->put_ref (ptr, *pref);
	  return true;
	}

      if (code == ADDR_EXPR || code == SSA_NAME)
	{
	  /* Simple copy: recurse into the right-hand side and cache
	     the result for PTR.  */
	  if (!compute_objsize_r (rhs, stmt, ostype, pref, snlim, qry))
	    return false;
	  qry->put_ref (ptr, *pref);
	  return true;
	}

      /* (This could also be an assignment from a nonlocal pointer.)  Save
	 PTR to mention in diagnostics but otherwise treat it as a pointer
	 to an unknown object.  */
      pref->ref = rhs;
      pref->base0 = false;
      pref->set_max_size_range ();
      return true;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
2135 | ||
2136 | /* A "public" wrapper around the above. Clients should use this overload | |
2137 | instead. */ | |
2138 | ||
2139 | tree | |
9a27acc3 MS |
2140 | compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref, |
2141 | pointer_query *ptr_qry) | |
2a837de2 MS |
2142 | { |
2143 | pointer_query qry; | |
9a27acc3 MS |
2144 | if (ptr_qry) |
2145 | ptr_qry->depth = 0; | |
2146 | else | |
2147 | ptr_qry = &qry; | |
820f0940 MS |
2148 | |
2149 | /* Clear and invalidate in case *PREF is being reused. */ | |
2150 | pref->offrng[0] = pref->offrng[1] = 0; | |
2151 | pref->sizrng[0] = pref->sizrng[1] = -1; | |
2152 | ||
2a837de2 | 2153 | ssa_name_limit_t snlim; |
9a27acc3 | 2154 | if (!compute_objsize_r (ptr, stmt, ostype, pref, snlim, ptr_qry)) |
2a837de2 MS |
2155 | return NULL_TREE; |
2156 | ||
2157 | offset_int maxsize = pref->size_remaining (); | |
2158 | if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0) | |
2159 | pref->offrng[0] = 0; | |
2160 | return wide_int_to_tree (sizetype, maxsize); | |
2161 | } | |
2162 | ||
2163 | /* Transitional wrapper. The function should be removed once callers | |
2164 | transition to the pointer_query API. */ | |
2165 | ||
2166 | tree | |
9a27acc3 MS |
2167 | compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref, |
2168 | range_query *rvals /* = NULL */) | |
2a837de2 MS |
2169 | { |
2170 | pointer_query qry; | |
9a27acc3 MS |
2171 | qry.rvals = rvals; |
2172 | return compute_objsize (ptr, stmt, ostype, pref, &qry); | |
2a837de2 MS |
2173 | } |
2174 | ||
2175 | /* Legacy wrapper around the above. The function should be removed | |
2176 | once callers transition to one of the two above. */ | |
2177 | ||
2178 | tree | |
2179 | compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */, | |
2180 | tree *poff /* = NULL */, range_query *rvals /* = NULL */) | |
2181 | { | |
2182 | /* Set the initial offsets to zero and size to negative to indicate | |
2183 | none has been computed yet. */ | |
2184 | access_ref ref; | |
9a27acc3 | 2185 | tree size = compute_objsize (ptr, nullptr, ostype, &ref, rvals); |
2a837de2 MS |
2186 | if (!size || !ref.base0) |
2187 | return NULL_TREE; | |
2188 | ||
2189 | if (pdecl) | |
2190 | *pdecl = ref.ref; | |
2191 | ||
2192 | if (poff) | |
2193 | *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]); | |
2194 | ||
2195 | return size; | |
2196 | } | |
1ff4dbdd MS |
2197 | |
/* Determine the offset *FLDOFF of the first byte of a struct member
   of TYPE (possibly recursively) into which the byte offset OFF points,
   starting after the field START_AFTER if it's non-null.  On success,
   if nonnull, set *FLDOFF to the offset of the first byte, and return
   the field decl.  If nonnull, set *NEXTOFF to the offset of the next
   field (which reflects any padding between the returned field and
   the next).  Otherwise, if no such member can be found, return null.  */

tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Provide local storage so the out-parameters can be used
     unconditionally below.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset, valid when nonnegative; avoids calling
     int_byte_position twice for the same field.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip artificial fields and non-FIELD_DECLs (e.g., types).  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.
	 Use the cached offset from the previous iteration if set.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success.  */
      tree typesize = TYPE_SIZE_UNIT (fldtype);
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  next_pos = int_byte_position (next_fld);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the next
	 field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  /* If the scan fell off the end of the struct report the offset of
     the last relevant field as the next offset.  */
  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
2314 | ||
/* Determine the offset *ELTOFF of the first byte of the array element
   of array ARTYPE into which the byte offset OFF points.  On success
   set *ELTOFF to the offset of the first byte and return the element
   type.  Otherwise, if no such element can be found, return null.
   If nonnull, also set *SUBAR_SIZE to the size in bytes of the
   innermost subarray element type of ARTYPE.  */

tree
array_elt_at_offset (tree artype, HOST_WIDE_INT off,
		     HOST_WIDE_INT *eltoff /* = nullptr */,
		     HOST_WIDE_INT *subar_size /* = nullptr */)
{
  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);

  /* Provide local storage so the out-parameters can be used
     unconditionally below.  */
  HOST_WIDE_INT dummy;
  if (!eltoff)
    eltoff = &dummy;
  if (!subar_size)
    subar_size = &dummy;

  /* Strip array types down to the innermost element type.  */
  tree eltype = artype;
  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
    eltype = TREE_TYPE (eltype);

  tree subartype = eltype;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
    eltype = TREE_TYPE (eltype);

  *subar_size = int_size_in_bytes (subartype);

  if (eltype == artype)
    {
      /* A single-dimension array of char-mode elements: the whole
	 array is the "element".  */
      *eltoff = 0;
      return artype;
    }

  HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
  HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);

  if (off < artype_size)
    {
      /* Round OFF down to the start of the containing element.  */
      *eltoff = (off / eltype_size) * eltype_size;
      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
    }

  return NULL_TREE;
}