/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *,
			      tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
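
/* Illustrative example (not part of the original sources): given

     struct S { int i; char buf[8]; int j; } s;

   __builtin_object_size (&s.buf[2], 0) is the number of bytes from
   &s.buf[2] to the end of the whole object s (sizeof (s) minus the
   offset of buf minus 2), while __builtin_object_size (&s.buf[2], 1)
   stops at the end of the buf subobject, i.e. sizeof (s.buf) - 2 == 6.
   Types 2 and 3 compute the corresponding lower bounds.  */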

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

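/* Worked example (illustrative, not from the original sources): with
   struct S { int i; char buf[8]; } s, the address &s.buf[2] wraps the
   expression s.buf[2]; compute_object_offset (s.buf[2], s) recurses
   through the ARRAY_REF and COMPONENT_REF and returns the sizetype
   constant offsetof (struct S, buf) + 2.  Anything it cannot decompose
   yields error_mark_node.  */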

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If the size cannot be determined, set *PSIZE to
   unknown[object_size_type] and return false; otherwise store the
   size in *PSIZE and return true.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize,
		  tree *pdecl /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  tree dummy;
  if (!pdecl)
    pdecl = &dummy;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz, pdecl);
	}
      else
	{
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown[object_size_type];
	    }
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    {
      *pdecl = pt_var;
      pt_var_size = DECL_SIZE_UNIT (pt_var);
    }
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}

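/* Example (illustrative): for struct T { char a[4]; char b[4]; } *t,
   __builtin_object_size (&t->a[1], 1) is computed from the subobject
   t->a and yields 3, while type 0 falls back to whatever is known about
   the whole object *t, which is often unknown for a plain pointer.  */
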
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
				      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int) gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, gimple_call_arg (call, arg1)),
			fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}

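/* Illustrative example (my_calloc is a hypothetical function, not from
   the original sources): given

     void *my_calloc (size_t n, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   the attribute arguments are 1-based, so arg1 and arg2 above become 0
   and 1, and a call my_calloc (4, 8) with constant arguments yields an
   object size of 32 bytes.  */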

/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
	return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}

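/* Example (illustrative): memcpy returns its first argument, so for
   q = memcpy (p, src, n) the ERF_RETURNS_ARG flag makes this function
   return P; the object size of Q is then taken from P instead of being
   treated as an unknown call result.  */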

/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize,
			     tree *pdecl /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize, pdecl))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown[object_size_type])
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}

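/* Usage sketch (illustrative, assuming a caller inside GCC with a
   pointer SSA name PTR in scope):

     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, 0, &size))
       ... use the maximum object size in bytes ...

   A false return means the size could not be determined; *psize is then
   unknown[object_size_type] (all ones for types 0/1, zero for 2/3).  */
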
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}

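/* The update pattern above (repeated in the functions below) merges a
   candidate size into object_sizes: bit 2 of object_size_type selects
   between maximum (types 0 and 1, where sizes start at 0 and only grow)
   and minimum (types 2 and 3, where sizes start at all ones and only
   shrink).  */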

/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

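/* Worked example (illustrative): for GIMPLE like

     p_1 = &buf;      <-- buf is char buf[16], so size (p_1) == 16
     q_2 = p_1 + 4;   <-- POINTER_PLUS_EXPR

   merge_object_sizes (osi, q_2, p_1, 4) derives size (q_2)
   = size (p_1) - 4 == 12, clamping at 0 when the offset exceeds the
   remaining size.  */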

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

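/* Example (illustrative): for p_3 = cond_4 ? &a : &b, both arms are
   merged with offset 0, so the maximum object size of p_3 is the MAX of
   the two arm sizes and the minimum object size is the MIN.  */
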
/* Compute object sizes for VAR.
   For an ADDR_EXPR, the object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc, the object size is
   the size of the allocation.
   For a POINTER_PLUS_EXPR whose second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the
   address space, and constants less than half of the address space are
   considered addition, while bigger constants are treated as subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that VAR might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}

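/* Example of a dependency loop (illustrative): for

     p_1 = PHI <&buf[0], p_2>
     p_2 = p_1 + 4;

   pass 0 reaches p_1 again while it is still being computed, marks it
   in osi->reexamine, and pass 2 (plus, for minimum sizes, the
   check_for_plus_in_loops detection below) resolves the cycle.  */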

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
	return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

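/* Note (an assumption about the surrounding pass pipeline, see
   passes.def): the pass is instantiated more than once; an early
   instance is given insert_min_max_p = true via set_pass_param so that
   partially known sizes are kept as MIN_EXPR/MAX_EXPR bounds around the
   surviving builtin, while a late instance folds the remaining
   __builtin_object_size calls to constants.  */
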
/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}

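/* End-to-end example (illustrative): given char buf[64], this pass
   folds _1 = __builtin_object_size (&buf[8], 0) to _1 = 56.  When the
   size cannot be determined, the fallback above folds the builtin to
   (size_t) -1 for types 0/1 and to 0 for types 2/3.  */
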
} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}