/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *,
                              tree * = NULL, tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] is the lower bound for the
   subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
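
/* For example, given the 16-byte struct

     struct A { char buf[10]; char tail[6]; } a;
     char *p = &a.buf[4];

   the four modes yield

     __builtin_object_size (p, 0) == 12   maximum, whole object
     __builtin_object_size (p, 1) == 6    maximum, subobject buf
     __builtin_object_size (p, 2) == 12   minimum, whole object
     __builtin_object_size (p, 3) == 6    minimum, subobject buf

   since 12 bytes remain until the end of a and 6 until the end of buf.  */
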
/* Bitmaps of which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we still consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
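
/* For instance, for

     struct S { int i; int arr[8]; } s;

   the offset of &s.arr[3] within s is computed as the byte offset of
   field arr (DECL_FIELD_OFFSET plus DECL_FIELD_BIT_OFFSET / BITS_PER_UNIT,
   4 on a typical 32-bit-int target) plus 3 * sizeof (int) from the
   ARRAY_REF, i.e. 16.  */
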
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   Store the computed size in *PSIZE and return true; if the size cannot
   be determined, set *PSIZE to unknown[object_size_type] and return
   false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize,
                  tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz, pdecl,
                                       poff);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    {
      *pdecl = pt_var;
      pt_var_size = DECL_SIZE_UNIT (pt_var);
    }
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
          *poff = bytes;
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              *poff = size_binop (PLUS_EXPR, *poff, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
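
/* The "last field" restriction above matters in practice: given

     struct msg { int len; char data[1]; } *m;

   the subobject size for &m->data falls back to the enclosing object
   rather than being capped at 1 byte, because such trailing one-element
   arrays are commonly used in place of a flexible array member, with
   extra storage allocated past the declared end of the struct.  */
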
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
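
/* For example, with hypothetical declarations such as

     void *my_malloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t m)
       __attribute__ ((alloc_size (1, 2)));

   my_malloc (32) has object size 32 and my_calloc (4, 8) has object
   size 4 * 8 = 32, provided the arguments are INTEGER_CSTs here.  */
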
/* If object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
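
/* String and memory builtins like memcpy return their first argument
   and carry ERF_RETURNS_ARG in their return flags, so in e.g.

     char buf[16];
     char *q = memcpy (buf, src, 8);

   the object size of q comes from buf (16 bytes), not from the call
   itself.  */
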
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize,
                             tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize, pdecl, poff))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  *poff = offset;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  The osi.reexamine bitmap at the end will
         contain which variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
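
/* A typical use from a caller's perspective (a sketch; real callers live
   e.g. in builtins.c for the _FORTIFY_SOURCE checks):

     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, 0, &size))
       use_size (size);   // at most SIZE bytes are accessible through ptr

   where use_size is a placeholder.  A false return means no estimate is
   available and the access has to be assumed to be of arbitrary size.  */
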
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
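
/* For instance, with

     char buf[16];
     char *p = buf;     (object size 16)
     char *q = p + 4;   (ORIG = p, OFFSET = 4 -> size 12)
     char *r = p + 20;  (offset past the end -> size 0)

   the offset is subtracted from ORIG's size, saturating at zero rather
   than wrapping around.  */
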
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR, or to an ADDR_EXPR of a MEM_REF,
   which expresses the same addition in a different form.  Return true
   if the object size might need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
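
/* E.g. for

     char a[8], b[32];
     char *p = cond ? a : b;

   the maximum object size of p merges to 32 and the minimum to 8, each
   arm being folded in with a zero offset.  */
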
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends
   on OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For a POINTER_PLUS_EXPR whose second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining
   bytes until the end of the object, the object size is 0, but if it is
   instead a pointer subtraction, the object size is
   unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the
   address space; constants less than half of the address space are
   considered addition, while bigger constants are considered
   subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its
   arguments, the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
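
/* The halving of offset_limit in init_offset_limit is what implements
   the addition/subtraction distinction described above.  A GIMPLE
   pointer subtraction p - 4 is represented as a POINTER_PLUS_EXPR with
   a huge unsigned constant, so on a 64-bit target

     q_2 = p_1 + 4;                    (size of p_1 minus 4)
     q_3 = p_1 + 18446744073709551612; (i.e. p_1 - 4: offset above half
                                        the address space, size unknown)

   are told apart purely by the magnitude of the constant.  */
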
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
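
/* The canonical case caught here is a pointer advanced in a loop, as in
   the example from compute_builtin_object_size:

     p = &buf[0];
     while (cond)
       p = p + 4;

   The PHI for p depends on p + 4, which depends on the PHI again at a
   different depth, so every SSA name on that cycle gets minimum object
   size 0 instead of pass 2 iterating for a very long time.  */
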
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}
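
/* Execute the pass.  For the insert_min_max_p variant, a sketch of the
   transformation performed on a mode-1 call:

     sz_1 = __builtin_object_size (p_2, 1);

   becomes

     tem_3 = __builtin_object_size (p_2, 1);
     sz_1 = MIN_EXPR <tem_3, cst>;

   where cst is the constant just computed (MAX_EXPR for mode 3), so that
   a later run of this pass folding tem_3 can only tighten the result.  */
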
unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}