/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "builtins.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes remaining
   until the end of the object.
   object_sizes[1] is the upper bound for the number of bytes remaining
   until the end of the subobject (innermost array or field with
   address taken).
   object_sizes[2] is the lower bound for the number of bytes remaining
   until the end of the object and object_sizes[3] the lower bound for
   the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
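
/* As an illustration of the four modes (an example added for
   exposition, not part of the original sources): given

     struct A { char buf1[9]; char buf2[1]; } a;
     char *p = &a.buf1[1];

   the expected results are

     __builtin_object_size (p, 0) == 9   maximum, whole object a
     __builtin_object_size (p, 1) == 8   maximum, subobject a.buf1
     __builtin_object_size (p, 2) == 9   minimum, whole object a
     __builtin_object_size (p, 3) == 8   minimum, subobject a.buf1

   since p points one byte past the start of the 10-byte object a and
   eight bytes remain in the innermost subobject buf1.  */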

/* Bitmaps recording which object sizes have already been computed,
   indexed by SSA_NAME version.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */
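
/* As a hedged illustration (types made up for exposition): for

     struct S { int a; char b[10]; } s;

   the offset of &s.b[3] within s combines the COMPONENT_REF offset of
   field b (4 bytes, assuming a 4-byte int and no padding) with the
   ARRAY_REF offset 3 * sizeof (char), giving size_int (7).  */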

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}


/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */
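
/* A sketch of the expected results (layout assumed for exposition):
   for

     struct T { char head[6]; char tail[4]; } t;

   addr_object_size on &t.head[2] yields 8 in the whole-object modes
   (10 bytes of t minus offset 2) and 4 in the subobject modes
   (6 bytes of head minus offset 2).  */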

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
          if (wi::neg_p (dsz))
            sz = 0;
          else if (wi::fits_uhwi_p (dsz))
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */
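
/* A hedged example (my_alloc is hypothetical, not a GCC function):
   given

     void *my_alloc (size_t n, size_t m)
       __attribute__ ((alloc_size (1, 2)));
     void *p = my_alloc (10, 4);

   the attribute operands are 1-based, so arg1 becomes 0 and arg2
   becomes 1 below, and the computed size is 10 * 4 = 40 bytes.
   Built-ins such as malloc and calloc are handled the same way even
   without the attribute.  */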

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */
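
/* For example (illustrative only): in

     p = __builtin_memcpy (dst, src, n);

   the call returns its first argument, so the object size of p equals
   the object size of dst and pass_through_call returns dst.  */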

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}


/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */
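
/* An illustrative use (hypothetical SSA names): for the statement

     q_2 = p_1 + 4;

   merge_object_sizes (osi, q_2, p_1, 4) folds the size of p_1 minus 4
   into the running size of q_2, clamping at 0 when the offset exceeds
   the bytes remaining in p_1's object.  */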

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR the object size is the number of bytes remaining
   to the end of the object (where what is considered an object depends
   on OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size
   is the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining
   bytes until the end of the object, the object size is 0, but if it
   is instead a pointer subtraction, the object size is
   unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the
   address space; constants less than half of the address space are
   considered an addition, while bigger constants a subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its
   arguments, the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */
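
/* A worked illustration of the addition/subtraction rule (numbers
   assume a 32-bit sizetype; example added for exposition): with
   offset_limit = 0x7fffffff, a statement such as

     p_2 = p_1 + 0xfffffffc;   (i.e. p_1 - 4 in two's complement)

   has an offset above offset_limit, so it is treated as a pointer
   subtraction and the object size of p_2 becomes unknown rather
   than being clamped to 0.  */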

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing the object size of is being
   increased within a loop.  If yes, assume all the SSA variables
   participating in that loop have minimum object size 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_object_sizes

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* In the first pass instance, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
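          /* Illustrative before/after (hypothetical GIMPLE, an example
             added for exposition): for mode 1,

               lhs_3 = __builtin_object_size (ptr_1, 1);

             becomes

               tem_7 = __builtin_object_size (ptr_1, 1);
               lhs_3 = MIN_EXPR <tem_7, computed_constant>;

             so later folding can still improve the value while the
             computed constant already provides a safe bound.  */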
          if (first_pass_instance)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes
                        = compute_builtin_object_size (ptr, object_size_type);
                      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
                                                             ? -1 : 0)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple g = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          result = fold_call_stmt (as_a <gcall *> (call), false);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}