/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2022 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "attribs.h"
#include "builtins.h"
#include "gimplify-me.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine, unknowns;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

struct GTY(()) object_size
{
  /* Estimate of bytes till the end of the object.  */
  tree size;
  /* Estimate of the size of the whole object.  */
  tree wholesize;
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, tree *, tree *t = NULL);
static tree alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for the object size and number of bytes till
   the end of the object.
   object_sizes[1] is upper bound for the object size and number of bytes till
   the end of the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for the object size and number of bytes till
   the end of the object and object_sizes[3] lower bound for subobject.

   For static object sizes, the object size and the bytes till the end of the
   object are both INTEGER_CST.  In the dynamic case, they are finally either a
   gimple variable or an INTEGER_CST.  */
static vec<object_size> object_sizes[OST_END];

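/* An illustrative sketch of the four entries above (not part of the pass;
   assumes a 4-byte int and no unusual padding):

     struct A { char buf[4]; int i; } a;   // sizeof (a) == 8
     char *p = &a.buf[1];

     __builtin_object_size (p, 0) == 7    // maximum, whole object
     __builtin_object_size (p, 1) == 3    // maximum, subobject a.buf
     __builtin_object_size (p, 2) == 7    // minimum, whole object
     __builtin_object_size (p, 3) == 3    // minimum, subobject a.buf  */
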
/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[OST_END];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

/* Return true if VAL represents an initial size for OBJECT_SIZE_TYPE.  */

static inline bool
size_initval_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
          ? integer_all_onesp (val) : integer_zerop (val));
}

/* Return true if VAL represents an unknown size for OBJECT_SIZE_TYPE.  */

static inline bool
size_unknown_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
          ? integer_zerop (val) : integer_all_onesp (val));
}

/* Return true if VAL represents a valid size for OBJECT_SIZE_TYPE.  */

static inline bool
size_valid_p (tree val, int object_size_type)
{
  return ((object_size_type & OST_DYNAMIC) || TREE_CODE (val) == INTEGER_CST);
}

/* Return true if VAL is usable as an object size in the object_sizes
   vectors.  */

static inline bool
size_usable_p (tree val)
{
  return TREE_CODE (val) == SSA_NAME || TREE_CODE (val) == INTEGER_CST;
}

/* Return a tree with initial value for OBJECT_SIZE_TYPE.  */

static inline tree
size_initval (int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
          ? TYPE_MAX_VALUE (sizetype) : size_zero_node);
}

/* Return a tree with unknown value for OBJECT_SIZE_TYPE.  */

static inline tree
size_unknown (int object_size_type)
{
  return ((object_size_type & OST_MINIMUM)
          ? size_zero_node : TYPE_MAX_VALUE (sizetype));
}

/* Grow object_sizes[OBJECT_SIZE_TYPE] to num_ssa_names.  */

static inline void
object_sizes_grow (int object_size_type)
{
  if (num_ssa_names > object_sizes[object_size_type].length ())
    object_sizes[object_size_type].safe_grow (num_ssa_names, true);
}

/* Release object_sizes[OBJECT_SIZE_TYPE].  */

static inline void
object_sizes_release (int object_size_type)
{
  object_sizes[object_size_type].release ();
}

/* Return true if object_sizes[OBJECT_SIZE_TYPE][VARNO] is unknown.  */

static inline bool
object_sizes_unknown_p (int object_size_type, unsigned varno)
{
  return size_unknown_p (object_sizes[object_size_type][varno].size,
                         object_size_type);
}

/* Return the raw size expression for VARNO corresponding to OSI.  This returns
   the TREE_VEC as is and should only be used during gimplification.  */

static inline object_size
object_sizes_get_raw (struct object_size_info *osi, unsigned varno)
{
  gcc_assert (osi->pass != 0);
  return object_sizes[osi->object_size_type][varno];
}

/* Return a size tree for VARNO corresponding to OSI.  If WHOLE is true, return
   the whole object size.  Use this for building size expressions based on size
   of VARNO.  */

static inline tree
object_sizes_get (struct object_size_info *osi, unsigned varno,
                  bool whole = false)
{
  tree ret;
  int object_size_type = osi->object_size_type;

  if (whole)
    ret = object_sizes[object_size_type][varno].wholesize;
  else
    ret = object_sizes[object_size_type][varno].size;

  if (object_size_type & OST_DYNAMIC)
    {
      if (TREE_CODE (ret) == MODIFY_EXPR)
        return TREE_OPERAND (ret, 0);
      else if (TREE_CODE (ret) == TREE_VEC)
        return TREE_VEC_ELT (ret, TREE_VEC_LENGTH (ret) - 1);
      else
        gcc_checking_assert (size_usable_p (ret));
    }

  return ret;
}

/* Initialize the size and whole size of VARNO corresponding to OSI to VAL
   and WHOLEVAL respectively.  */

static inline void
object_sizes_initialize (struct object_size_info *osi, unsigned varno,
                         tree val, tree wholeval)
{
  int object_size_type = osi->object_size_type;

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;
}

/* Bundle the size expression EXPR with the SSA name NAME, both of sizetype.
   Normally this builds a MODIFY_EXPR setting NAME to EXPR; if EXPR is a
   TREE_VEC (emitted only for PHI nodes), NAME is recorded as its result
   element and the TREE_VEC is returned as is.  */

static tree
bundle_sizes (tree name, tree expr)
{
  gcc_checking_assert (TREE_TYPE (name) == sizetype);

  if (TREE_CODE (expr) == TREE_VEC)
    {
      TREE_VEC_ELT (expr, TREE_VEC_LENGTH (expr) - 1) = name;
      return expr;
    }

  gcc_checking_assert (types_compatible_p (TREE_TYPE (expr), sizetype));
  return build2 (MODIFY_EXPR, sizetype, name, expr);
}

/* Set size for VARNO corresponding to OSI to VAL if it is the new minimum or
   maximum.  For static sizes, each element of TREE_VEC is always INTEGER_CST
   throughout the computation.  For dynamic sizes, each element may either be a
   gimple variable, a MODIFY_EXPR or a TREE_VEC.  The MODIFY_EXPR is for
   expressions that need to be gimplified.  TREE_VECs are special, they're
   emitted only for GIMPLE_PHI and the PHI result variable is the last element
   of the vector.  */

static bool
object_sizes_set (struct object_size_info *osi, unsigned varno, tree val,
                  tree wholeval)
{
  int object_size_type = osi->object_size_type;
  object_size osize = object_sizes[object_size_type][varno];
  bool changed = true;

  tree oldval = osize.size;
  tree old_wholeval = osize.wholesize;

  if (object_size_type & OST_DYNAMIC)
    {
      if (bitmap_bit_p (osi->reexamine, varno))
        {
          if (size_unknown_p (val, object_size_type))
            {
              oldval = object_sizes_get (osi, varno);
              old_wholeval = object_sizes_get (osi, varno, true);
              bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (oldval));
              bitmap_set_bit (osi->unknowns, SSA_NAME_VERSION (old_wholeval));
              bitmap_clear_bit (osi->reexamine, varno);
            }
          else
            {
              val = bundle_sizes (oldval, val);
              wholeval = bundle_sizes (old_wholeval, wholeval);
            }
        }
      else
        {
          gcc_checking_assert (size_initval_p (oldval, object_size_type));
          gcc_checking_assert (size_initval_p (old_wholeval,
                                               object_size_type));
          /* For dynamic object sizes, all object sizes that are not gimple
             variables will need to be gimplified.  */
          if (wholeval != val && !size_usable_p (wholeval))
            {
              bitmap_set_bit (osi->reexamine, varno);
              wholeval = bundle_sizes (make_ssa_name (sizetype), wholeval);
            }
          if (!size_usable_p (val))
            {
              bitmap_set_bit (osi->reexamine, varno);
              tree newval = bundle_sizes (make_ssa_name (sizetype), val);
              if (val == wholeval)
                wholeval = newval;
              val = newval;
            }
          /* If the new value is a temporary variable, mark it for
             reexamination.  */
          else if (TREE_CODE (val) == SSA_NAME && !SSA_NAME_DEF_STMT (val))
            bitmap_set_bit (osi->reexamine, varno);
        }
    }
  else
    {
      enum tree_code code = (object_size_type & OST_MINIMUM
                             ? MIN_EXPR : MAX_EXPR);

      val = size_binop (code, val, oldval);
      wholeval = size_binop (code, wholeval, old_wholeval);
      changed = (tree_int_cst_compare (val, oldval) != 0
                 || tree_int_cst_compare (old_wholeval, wholeval) != 0);
    }

  object_sizes[object_size_type][varno].size = val;
  object_sizes[object_size_type][varno].wholesize = wholeval;

  return changed;
}

/* Set temporary SSA names for object size and whole size to resolve dependency
   loops in dynamic size computation.  */

static inline void
object_sizes_set_temp (struct object_size_info *osi, unsigned varno)
{
  tree val = object_sizes_get (osi, varno);

  if (size_initval_p (val, osi->object_size_type))
    object_sizes_set (osi, varno,
                      make_ssa_name (sizetype),
                      make_ssa_name (sizetype));
}

/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}

/* Return the number of bytes remaining from offset OFFSET to the end of an
   object of size SZ.  If WHOLESIZE is not NULL_TREE, use it to compute the
   net offset of the pointer within the whole object, which should always be
   positive and hence within OFFSET_LIMIT for valid offsets.  */

static tree
size_for_offset (tree sz, tree offset, tree wholesize = NULL_TREE)
{
  gcc_checking_assert (types_compatible_p (TREE_TYPE (sz), sizetype));

  /* For negative offsets, if we have a distinct WHOLESIZE, use it to get a net
     offset from the whole object.  */
  if (wholesize && wholesize != sz
      && (TREE_CODE (sz) != INTEGER_CST
          || TREE_CODE (wholesize) != INTEGER_CST
          || tree_int_cst_compare (sz, wholesize)))
    {
      gcc_checking_assert (types_compatible_p (TREE_TYPE (wholesize),
                                               sizetype));

      /* Restructure SZ - OFFSET as
         WHOLESIZE - (WHOLESIZE + OFFSET - SZ) so that the offset part, i.e.
         WHOLESIZE + OFFSET - SZ is only allowed to be positive.  */
      tree tmp = size_binop (MAX_EXPR, wholesize, sz);
      offset = fold_build2 (PLUS_EXPR, sizetype, tmp, offset);
      offset = fold_build2 (MINUS_EXPR, sizetype, offset, sz);
      sz = tmp;
    }

  /* Safe to convert now, since a valid net offset should be non-negative.  */
  if (!useless_type_conversion_p (sizetype, TREE_TYPE (offset)))
    offset = fold_convert (sizetype, offset);

  if (TREE_CODE (offset) == INTEGER_CST)
    {
      if (integer_zerop (offset))
        return sz;

      /* Negative or too large offset even after adjustment, cannot be within
         bounds of an object.  */
      if (compare_tree_int (offset, offset_limit) > 0)
        return size_zero_node;
    }

  return size_binop (MINUS_EXPR, size_binop (MAX_EXPR, sz, offset), offset);
}

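/* An illustrative sketch of why WHOLESIZE matters (not part of the pass):

     char buf[8];
     char *p = &buf[6];   // SZ == 2, WHOLESIZE == 8
     char *q = p - 4;     // OFFSET == -4

   The net offset WHOLESIZE + OFFSET - SZ == 8 - 4 - 2 == 2 is non-negative,
   so the size of q can still be computed as WHOLESIZE - 2 == 6 instead of
   giving up on the seemingly negative offset.  */
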
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

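/* For instance (an illustrative sketch), given

     struct B { int i; char a[8]; } b;

   the offset of the reference b.a[2] within b is computed recursively:
   the COMPONENT_REF contributes offsetof (struct B, a) and the ARRAY_REF
   contributes 2 * 1, the index scaled by the element size.  */
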
/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}

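/* A sketch of the flexible-array-member case (assuming a 4-byte int):

     struct F { int n; char data[]; };
     static struct F f = { 1, { 'a', 'b', 'c' } };

   TYPE_SIZE_UNIT (struct F) is 4, but the initializer extends the object,
   so decl_init_size returns byte_position (data) + 3 == 7.  */
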
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   Set *PSIZE (and *PWHOLESIZE if nonnull) and return true on success;
   if the size cannot be determined, leave them as
   size_unknown (object_size_type) and return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, tree *psize, tree *pwholesize)
{
  tree pt_var, pt_var_size = NULL_TREE, pt_var_wholesize = NULL_TREE;
  tree var_size, bytes, wholebytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);
  if (pwholesize)
    *pwholesize = size_unknown (object_size_type);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      tree sz, wholesize;

      if (!osi || (object_size_type & OST_SUBOBJECT) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~OST_SUBOBJECT, &sz);
          wholesize = sz;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            {
              sz = object_sizes_get (osi, SSA_NAME_VERSION (var));
              wholesize = object_sizes_get (osi, SSA_NAME_VERSION (var), true);
            }
          else
            sz = wholesize = size_unknown (object_size_type);
        }
      if (!size_unknown_p (sz, object_size_type))
        sz = size_for_offset (sz, TREE_OPERAND (pt_var, 1), wholesize);

      if (!size_unknown_p (sz, object_size_type)
          && (TREE_CODE (sz) != INTEGER_CST
              || compare_tree_int (sz, offset_limit) < 0))
        {
          pt_var_size = sz;
          pt_var_wholesize = wholesize;
        }
    }
  else if (DECL_P (pt_var))
    {
      pt_var_size = pt_var_wholesize
        = decl_init_size (pt_var, object_size_type & OST_MINIMUM);
      if (!pt_var_size)
        return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = pt_var_wholesize = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above if it is a constant.  */
      if (TREE_CODE (pt_var_size) == INTEGER_CST
          && compare_tree_int (pt_var_size, offset_limit) >= 0)
        return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & OST_SUBOBJECT)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size && TREE_CODE (pt_var_size) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size if fld isn't a flexible array
                 member.  */
              bool is_flexible_array_mem_ref = false;
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0))))
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain && TYPE_MAX_VALUE (domain))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    is_flexible_array_mem_ref = array_at_struct_end_p (v);
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        /* compute object size only if v is not a
                           flexible array member.  */
                        if (!is_flexible_array_mem_ref)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        {
          var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
          if (!TREE_CONSTANT (var_size))
            var_size = get_or_create_ssa_default_def (cfun, var_size);
          if (!var_size)
            return false;
        }
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          bytes = size_for_offset (var_size, bytes);
          if (var != pt_var && pt_var_size && TREE_CODE (pt_var) == MEM_REF)
            {
              tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0),
                                                   pt_var);
              if (bytes2 != error_mark_node)
                {
                  bytes2 = size_for_offset (pt_var_size, bytes2);
                  bytes = size_binop (MIN_EXPR, bytes, bytes2);
                }
            }
        }
      else
        bytes = size_unknown (object_size_type);

      wholebytes
        = object_size_type & OST_SUBOBJECT ? var_size : pt_var_wholesize;
    }
  else if (!pt_var_size)
    return false;
  else
    {
      bytes = pt_var_size;
      wholebytes = pt_var_wholesize;
    }

  if (!size_unknown_p (bytes, object_size_type)
      && size_valid_p (bytes, object_size_type)
      && !size_unknown_p (wholebytes, object_size_type)
      && size_valid_p (wholebytes, object_size_type))
    {
      *psize = bytes;
      if (pwholesize)
        *pwholesize = wholebytes;
      return true;
    }

  return false;
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return size_unknown (object_size_type).  */

static tree
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  tree callfn = gimple_call_fndecl (call);
  if (callfn)
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return size_unknown (object_size_type);

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }
  else if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
           && callfn && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callfn)))
    arg1 = 0;

  /* Non-const arguments are OK here, let the caller handle constness.  */
  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || arg2 >= (int) gimple_call_num_args (call))
    return size_unknown (object_size_type);

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  return bytes ? bytes : size_unknown (object_size_type);
}

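/* For example (an illustrative sketch; my_calloc is hypothetical):

     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
     void *p = my_calloc (4, 10);

   alloc_object_size for this call returns 4 * 10 == 40; for malloc (n)
   it returns n, via the alloc_size attribute on the declaration.  */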

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}

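/* For example (sketch):

     char buf[10];
     char *p = (char *) memcpy (buf, src, 4);

   memcpy returns its first argument, so P inherits the size of BUF (10)
   rather than being treated as an unknown allocation.  */
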
/* Emit PHI nodes for the size and whole-size TREE_VECs computed for the
   GIMPLE_PHI statement STMT, inserting any statements needed to compute an
   argument's size on the corresponding incoming edge.  */

static void
emit_phi_nodes (gimple *stmt, tree size, tree wholesize)
{
  tree phires;
  gphi *wholephi = NULL;

  if (wholesize != size)
    {
      phires = TREE_VEC_ELT (wholesize, TREE_VEC_LENGTH (wholesize) - 1);
      wholephi = create_phi_node (phires, gimple_bb (stmt));
    }

  phires = TREE_VEC_ELT (size, TREE_VEC_LENGTH (size) - 1);
  gphi *phi = create_phi_node (phires, gimple_bb (stmt));
  gphi *obj_phi = as_a <gphi *> (stmt);

  gcc_checking_assert (TREE_CODE (wholesize) == TREE_VEC);
  gcc_checking_assert (TREE_CODE (size) == TREE_VEC);

  for (unsigned i = 0; i < gimple_phi_num_args (stmt); i++)
    {
      gimple_seq seq = NULL;
      tree wsz = TREE_VEC_ELT (wholesize, i);
      tree sz = TREE_VEC_ELT (size, i);

      /* If we built an expression, we will need to build statements
         and insert them on the edge right away.  */
      if (TREE_CODE (wsz) != SSA_NAME)
        wsz = force_gimple_operand (wsz, &seq, true, NULL);
      if (TREE_CODE (sz) != SSA_NAME)
        {
          gimple_seq s;
          sz = force_gimple_operand (sz, &s, true, NULL);
          gimple_seq_add_seq (&seq, s);
        }

      if (seq)
        gsi_insert_seq_on_edge (gimple_phi_arg_edge (obj_phi, i), seq);

      if (wholephi)
        add_phi_arg (wholephi, wsz,
                     gimple_phi_arg_edge (obj_phi, i),
                     gimple_phi_arg_location (obj_phi, i));

      add_phi_arg (phi, sz,
                   gimple_phi_arg_edge (obj_phi, i),
                   gimple_phi_arg_location (obj_phi, i));
    }
}

/* Descend through EXPR and return size_unknown if it uses any SSA variable
   that object_sizes_set or object_sizes_set_temp generated and that later
   turned out to be size_unknown, as recorded in OSI->unknowns.  */

static tree
propagate_unknowns (object_size_info *osi, tree expr)
{
  int object_size_type = osi->object_size_type;

  switch (TREE_CODE (expr))
    {
    case SSA_NAME:
      if (bitmap_bit_p (osi->unknowns, SSA_NAME_VERSION (expr)))
        return size_unknown (object_size_type);
      return expr;

    case MIN_EXPR:
    case MAX_EXPR:
      {
        tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
        if (size_unknown_p (res, object_size_type))
          return res;

        res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
        if (size_unknown_p (res, object_size_type))
          return res;

        return expr;
      }
    case MODIFY_EXPR:
      {
        tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 1));
        if (size_unknown_p (res, object_size_type))
          return res;
        return expr;
      }
    case TREE_VEC:
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
        {
          tree res = propagate_unknowns (osi, TREE_VEC_ELT (expr, i));
          if (size_unknown_p (res, object_size_type))
            return res;
        }
      return expr;
    case PLUS_EXPR:
    case MINUS_EXPR:
      {
        tree res = propagate_unknowns (osi, TREE_OPERAND (expr, 0));
        if (size_unknown_p (res, object_size_type))
          return res;

        return expr;
      }
    default:
      return expr;
    }
}

/* Walk through size expressions that need reexamination and generate
   statements for them.  */

static void
gimplify_size_expressions (object_size_info *osi)
{
  int object_size_type = osi->object_size_type;
  bitmap_iterator bi;
  unsigned int i;
  bool changed;

  /* Step 1: Propagate unknowns into expressions.  */
  bitmap reexamine = BITMAP_ALLOC (NULL);
  bitmap_copy (reexamine, osi->reexamine);
  do
    {
      changed = false;
      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
        {
          object_size cur = object_sizes_get_raw (osi, i);

          if (size_unknown_p (propagate_unknowns (osi, cur.size),
                              object_size_type)
              || size_unknown_p (propagate_unknowns (osi, cur.wholesize),
                                 object_size_type))
            {
              object_sizes_set (osi, i,
                                size_unknown (object_size_type),
                                size_unknown (object_size_type));
              changed = true;
            }
        }
      bitmap_copy (reexamine, osi->reexamine);
    }
  while (changed);

  /* Release all unknowns.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->unknowns, 0, i, bi)
    release_ssa_name (ssa_name (i));

  /* Expand all size expressions to put their definitions close to the objects
     for which size is being computed.  */
  EXECUTE_IF_SET_IN_BITMAP (osi->reexamine, 0, i, bi)
    {
      gimple_seq seq = NULL;
      object_size osize = object_sizes_get_raw (osi, i);

      gimple *stmt = SSA_NAME_DEF_STMT (ssa_name (i));
      enum gimple_code code = gimple_code (stmt);

      /* PHI nodes need special attention.  */
      if (code == GIMPLE_PHI)
        emit_phi_nodes (stmt, osize.size, osize.wholesize);
      else
        {
          tree size_expr = NULL_TREE;

          /* Bundle wholesize in with the size to gimplify if needed.  */
          if (osize.wholesize != osize.size
              && !size_usable_p (osize.wholesize))
            size_expr = size_binop (COMPOUND_EXPR,
                                    osize.wholesize,
                                    osize.size);
          else if (!size_usable_p (osize.size))
            size_expr = osize.size;

          if (size_expr)
            {
              gimple_stmt_iterator gsi;
              if (code == GIMPLE_NOP)
                gsi = gsi_start_bb (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
              else
                gsi = gsi_for_stmt (stmt);

              force_gimple_operand (size_expr, &seq, true, NULL);
              gsi_insert_seq_before (&gsi, seq, GSI_CONTINUE_LINKING);
            }
        }

      /* We're done, so replace the MODIFY_EXPRs with the SSA names.  */
      object_sizes_initialize (osi, i,
                               object_sizes_get (osi, i),
                               object_sizes_get (osi, i, true));
    }
}

/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             tree *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type < OST_END);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = size_unknown (object_size_type);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & OST_SUBOBJECT)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (((object_size_type & OST_DYNAMIC)
                   || (tree_fits_shwi_p (offset)
                       && compare_tree_int (offset, offset_limit) <= 0))
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize))
                {
                  *psize = size_for_offset (*psize, offset);
                  return true;
                }
            }
        }
      return false;
    }

  struct object_size_info osi;
  osi.object_size_type = object_size_type;
  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      bitmap_iterator bi;
      unsigned int i;

      object_sizes_grow (object_size_type);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %s%sobject size for ",
                   (object_size_type & OST_MINIMUM) ? "minimum" : "maximum",
                   (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
                   (object_size_type & OST_SUBOBJECT) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);

      if (object_size_type & OST_DYNAMIC)
        osi.unknowns = BITMAP_ALLOC (NULL);
      else
        {
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;
        }

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      if (object_size_type & OST_DYNAMIC)
        {
          osi.pass = 1;
          gimplify_size_expressions (&osi);
          BITMAP_FREE (osi.unknowns);
          bitmap_clear (osi.reexamine);
        }

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & OST_MINIMUM)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (!object_sizes_unknown_p (object_size_type, i))
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %s%sobject size ",
                         ((object_size_type & OST_MINIMUM) ? "minimum"
                          : "maximum"),
                         (object_size_type & OST_DYNAMIC) ? "dynamic " : "",
                         (object_size_type & OST_SUBOBJECT) ? "sub" : "");
                print_generic_expr (dump_file, object_sizes_get (&osi, i),
                                    dump_flags);
                fprintf (dump_file, "\n");
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes_get (&osi, SSA_NAME_VERSION (ptr));
  return !size_unknown_p (*psize, object_size_type);
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  tree bytes, wholesize;

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes, &wholesize);
  else
    bytes = wholesize = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, wholesize);
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_assert (is_gimple_call (call));

  gcc_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_assert (osi->pass == 0);
  tree bytes = alloc_object_size (call, object_size_type);

  if (!size_valid_p (bytes, object_size_type))
    bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);

  gcc_checking_assert (!object_sizes_unknown_p (object_size_type, varno));
  gcc_checking_assert (osi->pass == 0);
  tree bytes = size_unknown (object_size_type);

  object_sizes_set (osi, varno, bytes, bytes);
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  tree orig_bytes, wholesize;

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes_get (osi, SSA_NAME_VERSION (orig));
  wholesize = object_sizes_get (osi, SSA_NAME_VERSION (orig), true);

  if (object_sizes_set (osi, varno, orig_bytes, wholesize))
    osi->changed = true;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree bytes, wholesize;
  tree op0, op1;
  bool reexamine = false;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  /* Handle PTR + OFFSET here.  */
  if (size_valid_p (op1, object_size_type)
      && (TREE_CODE (op0) == SSA_NAME || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (TREE_CODE (op0) == SSA_NAME)
        {
          if (osi->pass == 0)
            collect_object_sizes_for (osi, op0);

          bytes = object_sizes_get (osi, SSA_NAME_VERSION (op0));
          wholesize = object_sizes_get (osi, SSA_NAME_VERSION (op0), true);
          reexamine = bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (op0));
        }
      else
        {
          /* op0 will be ADDR_EXPR here.  We should never come here during
             reexamination.  */
          gcc_checking_assert (osi->pass == 0);
          addr_object_size (osi, op0, object_size_type, &bytes, &wholesize);
        }

      /* size_for_offset doesn't make sense for -1 size, but it does for size 0
         since the wholesize could be non-zero and a negative offset could give
         a non-zero size.  */
      if (size_unknown_p (bytes, 0))
        ;
      else if ((object_size_type & OST_DYNAMIC)
               || compare_tree_int (op1, offset_limit) <= 0)
        bytes = size_for_offset (bytes, op1, wholesize);
      /* In the static case, with a negative offset, the best estimate for
         minimum size is size_unknown but for maximum size, the wholesize is a
         better estimate than size_unknown.  */
      else if (object_size_type & OST_MINIMUM)
        bytes = size_unknown (object_size_type);
      else
        bytes = wholesize;
    }
  else
    bytes = wholesize = size_unknown (object_size_type);

  if (!size_valid_p (bytes, object_size_type)
      || !size_valid_p (wholesize, object_size_type))
    bytes = wholesize = size_unknown (object_size_type);

  if (object_sizes_set (osi, varno, bytes, wholesize))
    osi->changed = true;
  return reexamine;
}

/* Compute the dynamic object size for VAR.  Return the result in SIZE and
   WHOLESIZE.  */

static void
dynamic_object_size (struct object_size_info *osi, tree var,
                     tree *size, tree *wholesize)
{
  int object_size_type = osi->object_size_type;

  if (TREE_CODE (var) == SSA_NAME)
    {
      unsigned varno = SSA_NAME_VERSION (var);

      collect_object_sizes_for (osi, var);
      *size = object_sizes_get (osi, varno);
      *wholesize = object_sizes_get (osi, varno, true);
    }
  else if (TREE_CODE (var) == ADDR_EXPR)
    addr_object_size (osi, var, object_size_type, size, wholesize);
  else
    *size = *wholesize = size_unknown (object_size_type);
}

/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes_unknown_p (object_size_type, varno))
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (object_size_type & OST_DYNAMIC)
    {
      tree then_size, then_wholesize, else_size, else_wholesize;

      dynamic_object_size (osi, then_, &then_size, &then_wholesize);
      if (!size_unknown_p (then_size, object_size_type))
        dynamic_object_size (osi, else_, &else_size, &else_wholesize);

      tree cond_size, cond_wholesize;
      if (size_unknown_p (then_size, object_size_type)
          || size_unknown_p (else_size, object_size_type))
        cond_size = cond_wholesize = size_unknown (object_size_type);
      else
        {
          cond_size = fold_build3 (COND_EXPR, sizetype,
                                   gimple_assign_rhs1 (stmt),
                                   then_size, else_size);
          cond_wholesize = fold_build3 (COND_EXPR, sizetype,
                                        gimple_assign_rhs1 (stmt),
                                        then_wholesize, else_wholesize);
        }

      object_sizes_set (osi, varno, cond_size, cond_wholesize);

      return false;
    }

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes_unknown_p (object_size_type, varno))
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

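/* For example (a sketch; depending on gimplification this may instead
   appear as a PHI node, handled further below):

     char a[4], b[8];
     char *p = cond ? a : b;

   For static sizes the result merges to the maximum of 8 (or the minimum
   of 4), while the dynamic case builds the expression cond ? 4 : 8.  */
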
/* Find size of an object passed as a parameter to the function.  */

static void
parm_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  tree parm = SSA_NAME_VAR (var);

  if (!(object_size_type & OST_DYNAMIC) || !POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      expr_object_size (osi, var, parm);
      return;
    }

  /* Look for access attribute.  */
  rdwr_map rdwr_idx;

  tree fndecl = cfun->decl;
  const attr_access *access = get_parm_access (rdwr_idx, parm, fndecl);
  tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (parm)));
  tree sz = NULL_TREE;

  /* If we have an explicit access attribute with a usable size argument... */
  if (access && access->sizarg != UINT_MAX && !access->internal_p
      /* ... and either PARM is void * or has a type that is complete and has a
         constant size... */
      && ((typesize && poly_int_tree_p (typesize))
          || (!typesize && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))))
    {
      tree fnargs = DECL_ARGUMENTS (fndecl);
      tree arg = NULL_TREE;
      unsigned argpos = 0;

      /* ... then walk through the parameters to pick the size parameter and
         safely scale it by the type size if needed.  */
      for (arg = fnargs; arg; arg = TREE_CHAIN (arg), ++argpos)
        if (argpos == access->sizarg && INTEGRAL_TYPE_P (TREE_TYPE (arg)))
          {
            sz = get_or_create_ssa_default_def (cfun, arg);
            if (sz != NULL_TREE)
              {
                sz = fold_convert (sizetype, sz);
                if (typesize)
                  sz = size_binop (MULT_EXPR, sz, typesize);
              }
            break;
          }
    }
  if (!sz)
    sz = size_unknown (object_size_type);

  object_sizes_set (osi, SSA_NAME_VERSION (var), sz, sz);
}

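/* For example (sketch, assuming a 4-byte int):

     __attribute__ ((access (write_only, 1, 2)))
     void fill (int *buf, size_t nelems)
     {
       size_t sz = __builtin_dynamic_object_size (buf, 0);
     }

   Here SZ evaluates to nelems * 4: the size argument named by the access
   attribute, scaled by the size of the pointee type.  */
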
/* Compute an object size expression for VAR, which is the result of a PHI
   node.  */

static void
phi_dynamic_object_size (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned i, num_args = gimple_phi_num_args (stmt);
  bool wholesize_needed = false;

  /* The extra space is for the PHI result at the end, which object_sizes_set
     sets for us.  */
  tree sizes = make_tree_vec (num_args + 1);
  tree wholesizes = make_tree_vec (num_args + 1);

  /* Bail out if the size of any of the PHI arguments cannot be
     determined.  */
  for (i = 0; i < num_args; i++)
    {
      edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), i);
      if (e->flags & EDGE_COMPLEX)
        break;

      tree rhs = gimple_phi_arg_def (stmt, i);
      tree size, wholesize;

      dynamic_object_size (osi, rhs, &size, &wholesize);

      if (size_unknown_p (size, object_size_type))
        break;

      if (size != wholesize)
        wholesize_needed = true;

      TREE_VEC_ELT (sizes, i) = size;
      TREE_VEC_ELT (wholesizes, i) = wholesize;
    }

  if (i < num_args)
    {
      ggc_free (sizes);
      ggc_free (wholesizes);
      sizes = wholesizes = size_unknown (object_size_type);
    }

  /* Point to the same TREE_VEC so that we can avoid emitting two PHI
     nodes.  */
  else if (!wholesize_needed)
    {
      ggc_free (wholesizes);
      wholesizes = sizes;
    }

  object_sizes_set (osi, varno, sizes, wholesizes);
}

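/* For example (sketch):

     char buf[8];
     char *p;
     if (cond)
       p = malloc (n);
     else
       p = buf;

   The PHI for p gets the size vector {n, 8, <result>}; emit_phi_nodes
   later materializes a matching size PHI, so
   __builtin_dynamic_object_size (p, 0) evaluates to cond ? n : 8.  */
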
1570 /* Compute object sizes for VAR.
1571 For ADDR_EXPR an object size is the number of remaining bytes
1572 to the end of the object (where what is considered an object depends on
1573 OSI->object_size_type).
1574 For allocation GIMPLE_CALL like malloc or calloc object size is the size
1575 of the allocation.
1576 For POINTER_PLUS_EXPR where second operand is a constant integer,
1577 object size is object size of the first operand minus the constant.
1578 If the constant is bigger than the number of remaining bytes until the
1579 end of the object, object size is 0, but if it is instead a pointer
1580 subtraction, object size is size_unknown (object_size_type).
1581 To differentiate addition from subtraction, ADDR_EXPR returns
1582 size_unknown (object_size_type) for all objects bigger than half of the
1583 address space, and constants less than half of the address space are
1584 considered addition, while bigger constants subtraction.
1585 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
1586 object size is object size of that argument.
1587 Otherwise, object size is the maximum of object sizes of variables
1588 that it might be set to. */
1589
1590 static void
1591 collect_object_sizes_for (struct object_size_info *osi, tree var)
1592 {
1593 int object_size_type = osi->object_size_type;
1594 unsigned int varno = SSA_NAME_VERSION (var);
1595 gimple *stmt;
1596 bool reexamine;
1597
1598 if (bitmap_bit_p (computed[object_size_type], varno))
1599 return;
1600
1601 if (osi->pass == 0)
1602 {
1603 if (bitmap_set_bit (osi->visited, varno))
1604 {
1605 /* Initialize to 0 for maximum size and M1U for minimum size so that
1606 it gets immediately overridden. */
1607 object_sizes_initialize (osi, varno,
1608 size_initval (object_size_type),
1609 size_initval (object_size_type));
1610 }
1611 else
1612 {
1613 /* Found a dependency loop. Mark the variable for later
1614 re-examination. */
1615 if (object_size_type & OST_DYNAMIC)
1616 object_sizes_set_temp (osi, varno);
1617
1618 bitmap_set_bit (osi->reexamine, varno);
1619 if (dump_file && (dump_flags & TDF_DETAILS))
1620 {
1621 fprintf (dump_file, "Found a dependency loop at ");
1622 print_generic_expr (dump_file, var, dump_flags);
1623 fprintf (dump_file, "\n");
1624 }
1625 return;
1626 }
1627 }
1628
1629 if (dump_file && (dump_flags & TDF_DETAILS))
1630 {
1631 fprintf (dump_file, "Visiting use-def links for ");
1632 print_generic_expr (dump_file, var, dump_flags);
1633 fprintf (dump_file, "\n");
1634 }
1635
1636 stmt = SSA_NAME_DEF_STMT (var);
1637 reexamine = false;
1638
1639 switch (gimple_code (stmt))
1640 {
1641 case GIMPLE_ASSIGN:
1642 {
1643 tree rhs = gimple_assign_rhs1 (stmt);
1644 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1645 || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1646 && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
1647 reexamine = plus_stmt_object_size (osi, var, stmt);
1648 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
1649 reexamine = cond_expr_object_size (osi, var, stmt);
1650 else if (gimple_assign_single_p (stmt)
1651 || gimple_assign_unary_nop_p (stmt))
1652 {
1653 if (TREE_CODE (rhs) == SSA_NAME
1654 && POINTER_TYPE_P (TREE_TYPE (rhs)))
1655 reexamine = merge_object_sizes (osi, var, rhs);
1656 else
1657 expr_object_size (osi, var, rhs);
1658 }
1659 else
1660 unknown_object_size (osi, var);
1661 break;
1662 }
1663
1664 case GIMPLE_CALL:
1665 {
1666 gcall *call_stmt = as_a <gcall *> (stmt);
1667 tree arg = pass_through_call (call_stmt);
1668 if (arg)
1669 {
1670 if (TREE_CODE (arg) == SSA_NAME
1671 && POINTER_TYPE_P (TREE_TYPE (arg)))
1672 reexamine = merge_object_sizes (osi, var, arg);
1673 else
1674 expr_object_size (osi, var, arg);
1675 }
1676 else
1677 call_object_size (osi, var, call_stmt);
1678 break;
1679 }
1680
1681 case GIMPLE_ASM:
1682 /* Pointers defined by __asm__ statements can point anywhere. */
1683 unknown_object_size (osi, var);
1684 break;
1685
1686 case GIMPLE_NOP:
1687 if (SSA_NAME_VAR (var)
1688 && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
1689 parm_object_size (osi, var);
1690 else
1691 /* Uninitialized SSA names point nowhere. */
1692 unknown_object_size (osi, var);
1693 break;
1694
1695 case GIMPLE_PHI:
1696 {
1697 unsigned i;
1698
1699 if (object_size_type & OST_DYNAMIC)
1700 {
1701 phi_dynamic_object_size (osi, var);
1702 break;
1703 }
1704
1705 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1706 {
1707 tree rhs = gimple_phi_arg (stmt, i)->def;
1708
1709 if (object_sizes_unknown_p (object_size_type, varno))
1710 break;
1711
1712 if (TREE_CODE (rhs) == SSA_NAME)
1713 reexamine |= merge_object_sizes (osi, var, rhs);
1714 else if (osi->pass == 0)
1715 expr_object_size (osi, var, rhs);
1716 }
1717 break;
1718 }
1719
1720 default:
1721 gcc_unreachable ();
1722 }
1723
1724 if (! reexamine || object_sizes_unknown_p (object_size_type, varno))
1725 {
1726 bitmap_set_bit (computed[object_size_type], varno);
1727 if (!(object_size_type & OST_DYNAMIC))
1728 bitmap_clear_bit (osi->reexamine, varno);
1729 }
1730 else
1731 {
1732 bitmap_set_bit (osi->reexamine, varno);
1733 if (dump_file && (dump_flags & TDF_DETAILS))
1734 {
1735 fprintf (dump_file, "Need to reexamine ");
1736 print_generic_expr (dump_file, var, dump_flags);
1737 fprintf (dump_file, "\n");
1738 }
1739 }
1740 }
1741
1742
1743 /* Helper function for check_for_plus_in_loops. Called recursively
1744 to detect loops. */
1745
1746 static void
1747 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1748 unsigned int depth)
1749 {
1750 gimple *stmt = SSA_NAME_DEF_STMT (var);
1751 unsigned int varno = SSA_NAME_VERSION (var);
1752
1753 if (osi->depths[varno])
1754 {
1755 if (osi->depths[varno] != depth)
1756 {
1757 unsigned int *sp;
1758
1759 /* Found a loop involving pointer addition. */
1760 for (sp = osi->tos; sp > osi->stack; )
1761 {
1762 --sp;
1763 bitmap_clear_bit (osi->reexamine, *sp);
1764 bitmap_set_bit (computed[osi->object_size_type], *sp);
1765 object_sizes_set (osi, *sp, size_zero_node,
1766 object_sizes_get (osi, *sp, true));
1767 if (*sp == varno)
1768 break;
1769 }
1770 }
1771 return;
1772 }
1773 else if (! bitmap_bit_p (osi->reexamine, varno))
1774 return;
1775
1776 osi->depths[varno] = depth;
1777 *osi->tos++ = varno;
1778
1779 switch (gimple_code (stmt))
1780 {
1781
1782 case GIMPLE_ASSIGN:
1783 {
1784 if ((gimple_assign_single_p (stmt)
1785 || gimple_assign_unary_nop_p (stmt))
1786 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1787 {
1788 tree rhs = gimple_assign_rhs1 (stmt);
1789
1790 check_for_plus_in_loops_1 (osi, rhs, depth);
1791 }
1792 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1793 {
1794 tree basevar = gimple_assign_rhs1 (stmt);
1795 tree cst = gimple_assign_rhs2 (stmt);
1796
1797 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1798
1799 check_for_plus_in_loops_1 (osi, basevar,
1800 depth + !integer_zerop (cst));
1801 }
1802 else
1803 gcc_unreachable ();
1804 break;
1805 }
1806
1807 case GIMPLE_CALL:
1808 {
1809 gcall *call_stmt = as_a <gcall *> (stmt);
1810 tree arg = pass_through_call (call_stmt);
1811 if (arg)
1812 {
1813 if (TREE_CODE (arg) == SSA_NAME)
1814 check_for_plus_in_loops_1 (osi, arg, depth);
1815 else
1816 gcc_unreachable ();
1817 }
1818 break;
1819 }
1820
1821 case GIMPLE_PHI:
1822 {
1823 unsigned i;
1824
1825 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1826 {
1827 tree rhs = gimple_phi_arg (stmt, i)->def;
1828
1829 if (TREE_CODE (rhs) == SSA_NAME)
1830 check_for_plus_in_loops_1 (osi, rhs, depth);
1831 }
1832 break;
1833 }
1834
1835 default:
1836 gcc_unreachable ();
1837 }
1838
1839 osi->depths[varno] = 0;
1840 osi->tos--;
1841 }


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Skip zero offsets, and huge offsets above OFFSET_LIMIT, which
         are assumed to represent negative values.  */
      if (integer_zerop (cst) || compare_tree_int (cst, offset_limit) > 0)
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
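
/* For example (editorial illustration, not from the original source):

     char buf[16];
     char *p = buf;
     while (cond)
       p += 2;

   p may have been advanced any number of times, so the lower bound
   __builtin_object_size (p, 2) must be 0, even though the upper bound
   for (p, 0) can still be 16.  */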


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_grow (object_size_type);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type < OST_END; object_size_type++)
    {
      object_sizes_release (object_size_type);
      BITMAP_FREE (computed[object_size_type]);
    }
}

/* Dummy valueize function: return T unchanged, so that
   gimple_fold_stmt_to_constant folds the statement with its SSA
   operands taken as they are.  */

static tree
do_valueize (tree t)
{
  return t;
}

/* Process a __builtin_object_size or __builtin_dynamic_object_size call in
   CALL early for subobjects before any object information is lost due to
   optimization.  Insert a MIN or MAX expression of the result and
   __builtin_object_size at I so that it may be processed in the second pass.
   __builtin_dynamic_object_size is treated like __builtin_object_size here
   since we're only looking for constant bounds.  */

static void
early_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  tree ost = gimple_call_arg (call, 1);
  tree lhs = gimple_call_lhs (call);
  gcc_assert (lhs != NULL_TREE);

  if (!tree_fits_uhwi_p (ost))
    return;

  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
  tree ptr = gimple_call_arg (call, 0);

  if (object_size_type != 1 && object_size_type != 3)
    return;

  if (TREE_CODE (ptr) != ADDR_EXPR && TREE_CODE (ptr) != SSA_NAME)
    return;

  tree type = TREE_TYPE (lhs);
  tree bytes;
  if (!compute_builtin_object_size (ptr, object_size_type, &bytes)
      || !int_fits_type_p (bytes, type))
    return;

  tree tem = make_ssa_name (type);
  gimple_call_set_lhs (call, tem);
  enum tree_code code = object_size_type & OST_MINIMUM ? MAX_EXPR : MIN_EXPR;
  tree cst = fold_convert (type, bytes);
  gimple *g = gimple_build_assign (lhs, code, tem, cst);
  gsi_insert_after (i, g, GSI_NEW_STMT);
  update_stmt (call);
}
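
/* Editorial illustration (not part of the original source): for

     struct S { char a[8]; int b; } s;
     sz_1 = __builtin_object_size (s.a, 1);

   the early pass rewrites the statement as the pair

     tem_2 = __builtin_object_size (s.a, 1);
     sz_1 = MIN_EXPR <tem_2, 8>;

   so the subobject bound 8, computed before optimizations blur field
   boundaries, still clamps whatever the late pass derives for tem_2.  */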

/* Attempt to fold one __builtin_dynamic_object_size call in CALL into an
   expression and insert it at I.  Return true if it succeeds.  */

static bool
dynamic_object_sizes_execute_one (gimple_stmt_iterator *i, gimple *call)
{
  gcc_assert (gimple_call_num_args (call) == 2);

  tree args[2];
  args[0] = gimple_call_arg (call, 0);
  args[1] = gimple_call_arg (call, 1);

  location_t loc = EXPR_LOC_OR_LOC (args[0], input_location);
  tree result_type = gimple_call_return_type (as_a <gcall *> (call));
  tree result = fold_builtin_call_array (loc, result_type,
                                         gimple_call_fn (call), 2, args);

  if (!result)
    return false;

  /* fold_builtin_call_array may wrap the result inside a
     NOP_EXPR.  */
  STRIP_NOPS (result);
  gimplify_and_update_call_from_tree (i, result);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Simplified (dynamic)\n ");
      print_gimple_stmt (dump_file, call, 0, dump_flags);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, result);
      fprintf (dump_file, "\n");
    }
  return true;
}
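
/* Editorial illustration (not part of the original source): for

     void *p_1 = __builtin_malloc (n_2);
     sz_3 = __builtin_dynamic_object_size (p_1, 0);

   folding can produce the SSA expression n_2 itself, so sz_3 becomes a
   runtime value rather than the constant (size_t) -1 that the static
   builtin would have to return here.  */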

static unsigned int
object_sizes_execute (function *fun, bool early)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          bool dynamic = false;

          gimple *call = gsi_stmt (i);
          if (gimple_call_builtin_p (call, BUILT_IN_DYNAMIC_OBJECT_SIZE))
            dynamic = true;
          else if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          init_object_sizes ();

          /* If early, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  Do the same for
             __builtin_dynamic_object_size too.  */
          if (early)
            {
              early_object_sizes_execute_one (&i, call);
              continue;
            }

          if (dynamic)
            {
              if (dynamic_object_sizes_execute_one (&i, call))
                continue;
              else
                {
                  /* If we could not find a suitable size expression, lower to
                     __builtin_object_size so that we may at least get a
                     constant lower or higher estimate.  */
                  tree bosfn = builtin_decl_implicit (BUILT_IN_OBJECT_SIZE);
                  gimple_call_set_fndecl (call, bosfn);
                  update_stmt (call);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      print_generic_expr (dump_file, gimple_call_arg (call, 0),
                                          dump_flags);
                      fprintf (dump_file,
                               ": Retrying as __builtin_object_size\n");
                    }
                }
            }

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
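              /* Editorial note: these are the builtin's documented failure
                 values, 0 for the minimum variants (types 2 and 3) and
                 (size_t) -1 for the maximum variants (types 0 and 1).  */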
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type & OST_MINIMUM)
                    result = build_zero_cst (size_type_node);
                  else if (object_size_type < OST_END)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return 0;
}
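
/* Worked example (editorial, not from the original source): given

     char buf[64];
     size_t n = __builtin_object_size (buf + 10, 0);

   the late pass folds the call to the constant 54 and propagates it into
   every use of n, so a fortified call such as __builtin___memcpy_chk that
   consumes n can be checked or simplified at compile time.  */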

/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  PROP_objsz, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_object_sizes (m_ctxt); }
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, false);
  }
}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}

/* Early version of pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_early_object_sizes =
{
  GIMPLE_PASS, /* type */
  "early_objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_early_object_sizes : public gimple_opt_pass
{
public:
  pass_early_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *fun) final override
  {
    return object_sizes_execute (fun, true);
  }
}; // class pass_early_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_early_object_sizes (gcc::context *ctxt)
{
  return new pass_early_object_sizes (ctxt);
}