1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
35 #include "stringpool.h"
/* NOTE(review): fragmentary listing -- original line numbers are embedded as
   leading tokens and interior lines are missing.  Later code dereferences
   osi->object_size_type, osi->pass and osi->depths, so this struct plainly has
   more members than are visible here; do not treat this as the full layout.  */
/* Per-query state threaded through the object-size walk: visited/reexamine
   bitmaps plus an explicit worklist stack (stack/tos).  */
38 struct object_size_info
43 bitmap visited
, reexamine
;
45 unsigned int *stack
, *tos
;
/* Sentinel "size could not be determined" value per object_size_type.
   NOTE(review): the initializer list is truncated in this listing; from the
   uses below it is indexed 0..3, presumably -1 for maximum modes and 0 for
   minimum modes -- confirm against the full file.  */
48 static const unsigned HOST_WIDE_INT unknown
[4] = {
55 static tree
compute_object_offset (const_tree
, const_tree
);
56 static bool addr_object_size (struct object_size_info
*,
57 const_tree
, int, unsigned HOST_WIDE_INT
*,
58 tree
* = NULL
, tree
* = NULL
);
59 static unsigned HOST_WIDE_INT
alloc_object_size (const gcall
*, int);
60 static tree
pass_through_call (const gcall
*);
61 static void collect_object_sizes_for (struct object_size_info
*, tree
);
62 static void expr_object_size (struct object_size_info
*, tree
, tree
);
63 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
64 unsigned HOST_WIDE_INT
);
65 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
*);
66 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
*);
67 static void init_offset_limit (void);
68 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
69 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
/* NOTE(review): global caches shared by all queries, indexed by the
   object_size_type (0..3) and by SSA name version.  */
72 /* object_sizes[0] is upper bound for number of bytes till the end of
74 object_sizes[1] is upper bound for number of bytes till the end of
75 the subobject (innermost array or field with address taken).
76 object_sizes[2] is lower bound for number of bytes till the end of
77 the object and object_sizes[3] lower bound for subobject. */
78 static vec
<unsigned HOST_WIDE_INT
> object_sizes
[4];
80 /* Bitmaps what object sizes have been computed already. */
81 static bitmap computed
[4];
83 /* Maximum value of offset we consider to be addition. */
84 static unsigned HOST_WIDE_INT offset_limit
;
87 /* Initialize OFFSET_LIMIT variable. */
/* NOTE(review): derives offset_limit from TYPE_MAX_VALUE (sizetype) when that
   fits a uhwi; the else-branch (if any) is missing from this fragmentary
   listing -- confirm against the full file.  */
89 init_offset_limit (void)
91 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
92 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
99 /* Compute offset of EXPR within VAR. Return error_mark_node
/* NOTE(review): fragmentary listing -- the switch's case labels (apart from
   VIEW_CONVERT_EXPR/NON_LVALUE_EXPR) and several braces/returns are missing.
   The visible branches handle: COMPONENT_REF (field offset + bit offset),
   a unary wrapper (recurse on operand 0), REALPART/IMAGPART-style offsets
   (TYPE_SIZE_UNIT of the expr type -- presumably IMAGPART_EXPR; confirm),
   ARRAY_REF (index minus low bound, scaled by element size, negated when the
   adjusted index is a negative constant), MEM_REF (constant mem_ref_offset),
   and a default returning error_mark_node.  */
103 compute_object_offset (const_tree expr
, const_tree var
)
105 enum tree_code code
= PLUS_EXPR
;
/* Base case: reached VAR itself -- offset zero (guard condition missing
   from this view).  */
109 return size_zero_node
;
111 switch (TREE_CODE (expr
))
114 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
115 if (base
== error_mark_node
)
118 t
= TREE_OPERAND (expr
, 1);
/* Field offset is DECL_FIELD_OFFSET plus the byte part of
   DECL_FIELD_BIT_OFFSET.  */
119 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
120 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
126 case VIEW_CONVERT_EXPR
:
127 case NON_LVALUE_EXPR
:
128 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
131 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
132 if (base
== error_mark_node
)
135 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
139 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
140 if (base
== error_mark_node
)
143 t
= TREE_OPERAND (expr
, 1);
144 tree low_bound
, unit_size
;
145 low_bound
= array_ref_low_bound (CONST_CAST_TREE (expr
));
146 unit_size
= array_ref_element_size (CONST_CAST_TREE (expr
));
/* Normalize the index against a non-zero array low bound.  */
147 if (! integer_zerop (low_bound
))
148 t
= fold_build2 (MINUS_EXPR
, TREE_TYPE (t
), t
, low_bound
);
/* A negative constant index is folded as a subtraction (negate and,
   per the missing lines, presumably switch code to MINUS_EXPR).  */
149 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
152 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
154 t
= fold_convert (sizetype
, t
);
155 off
= size_binop (MULT_EXPR
, unit_size
, t
);
/* MEM_REF case: only &object forms are expected here.  */
159 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
160 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
163 return error_mark_node
;
166 return size_binop (code
, base
, off
);
169 /* Returns the size of the object designated by DECL considering its
170 initializer if it either has one or if it would not affect its size,
171 otherwise the size of the object without the initializer when MIN
172 is true, else null. An object's initializer affects the object's
173 size if it's a struct type with a flexible array member. */
/* NOTE(review): fragmentary listing -- early returns of SIZE for the
   non-flexible-array cases are missing from this view.  */
176 decl_init_size (tree decl
, bool min
)
178 tree size
= DECL_SIZE_UNIT (decl
);
179 tree type
= TREE_TYPE (decl
);
/* Only a RECORD_TYPE can end in a flexible array member.  */
180 if (TREE_CODE (type
) != RECORD_TYPE
)
183 tree last
= last_field (type
);
187 tree last_type
= TREE_TYPE (last
);
/* Flexible array member: trailing ARRAY_TYPE with no TYPE_SIZE.  */
188 if (TREE_CODE (last_type
) != ARRAY_TYPE
189 || TYPE_SIZE (last_type
))
192 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
193 of the initializer and sometimes doesn't. */
194 size
= TYPE_SIZE_UNIT (type
);
/* Build a COMPONENT_REF to query the flexible member's initialized size.  */
195 tree ref
= build3 (COMPONENT_REF
, type
, decl
, last
, NULL_TREE
);
196 tree compsize
= component_ref_size (ref
);
/* No computable member size: per the header comment, return the plain
   size for MIN, else null.  */
198 return min
? size
: NULL_TREE
;
200 /* The size includes tail padding and initializer elements. */
201 tree pos
= byte_position (last
);
202 size
= fold_build2 (PLUS_EXPR
, TREE_TYPE (size
), pos
, compsize
);
206 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
207 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
208 If unknown, return unknown[object_size_type]. */
/* NOTE(review): fragmentary listing -- many interior lines (braces, else
   arms, goto/return statements, the `v` initialization, several conditions)
   are missing; comments below describe only what the visible lines show.
   Overall shape: find the base object of the ADDR_EXPR (pt_var), determine
   its whole-object size (pt_var_size), then for subobject queries
   (object_size_type & 1) narrow to the innermost addressed subobject and
   subtract the offset computed by compute_object_offset.  */
211 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
212 int object_size_type
, unsigned HOST_WIDE_INT
*psize
,
213 tree
*pdecl
/* = NULL */, tree
*poff
/* = NULL */)
215 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
/* Fallback out-parameters so pdecl/poff can be assumed non-null below
   (the redirection lines are missing from this view).  */
217 tree dummy_decl
, dummy_off
= size_zero_node
;
223 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
225 /* Set to unknown and overwrite just before returning if the size
226 could be determined. */
227 *psize
= unknown
[object_size_type
];
/* Strip handled components to reach the base object.  */
229 pt_var
= TREE_OPERAND (ptr
, 0);
230 while (handled_component_p (pt_var
))
231 pt_var
= TREE_OPERAND (pt_var
, 0);
236 if (TREE_CODE (pt_var
) == MEM_REF
)
238 unsigned HOST_WIDE_INT sz
;
/* Without caller state (osi == NULL), or for subobject queries, or when
   the MEM_REF base is not an SSA name, recurse through the public entry
   point with the subobject bit cleared.  */
240 if (!osi
|| (object_size_type
& 1) != 0
241 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
243 compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
244 object_size_type
& ~1, &sz
, pdecl
, poff
);
/* Otherwise consult/extend the per-SSA-name cache.  */
248 tree var
= TREE_OPERAND (pt_var
, 0);
250 collect_object_sizes_for (osi
, var
);
251 if (bitmap_bit_p (computed
[object_size_type
],
252 SSA_NAME_VERSION (var
)))
253 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
255 sz
= unknown
[object_size_type
];
/* Adjust the pointed-to size by the constant MEM_REF offset.  */
257 if (sz
!= unknown
[object_size_type
])
259 offset_int mem_offset
;
260 if (mem_ref_offset (pt_var
).is_constant (&mem_offset
))
262 offset_int dsz
= wi::sub (sz
, mem_offset
);
265 else if (wi::fits_uhwi_p (dsz
))
268 sz
= unknown
[object_size_type
];
271 sz
= unknown
[object_size_type
];
274 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
275 pt_var_size
= size_int (sz
);
/* Declared objects: size from the decl, honoring flexible-array
   initializers (minimum mode when object_size_type & 2).  */
277 else if (DECL_P (pt_var
))
280 pt_var_size
= decl_init_size (pt_var
, object_size_type
& 2);
284 else if (TREE_CODE (pt_var
) == STRING_CST
)
285 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
291 /* Validate the size determined above. */
292 if (!tree_fits_uhwi_p (pt_var_size
)
293 || tree_to_uhwi (pt_var_size
) >= offset_limit
)
/* PTR addressed a component of the base object.  */
297 if (pt_var
!= TREE_OPERAND (ptr
, 0))
/* Subobject queries: walk down to the innermost subobject whose size
   should be reported.  */
301 if (object_size_type
& 1)
303 var
= TREE_OPERAND (ptr
, 0);
/* Strip wrappers that do not start a new subobject (the loop header
   line is missing from this view).  */
306 && TREE_CODE (var
) != BIT_FIELD_REF
307 && TREE_CODE (var
) != COMPONENT_REF
308 && TREE_CODE (var
) != ARRAY_REF
309 && TREE_CODE (var
) != ARRAY_RANGE_REF
310 && TREE_CODE (var
) != REALPART_EXPR
311 && TREE_CODE (var
) != IMAGPART_EXPR
)
312 var
= TREE_OPERAND (var
, 0);
313 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
314 var
= TREE_OPERAND (var
, 0);
315 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
316 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
318 && tree_int_cst_lt (pt_var_size
,
319 TYPE_SIZE_UNIT (TREE_TYPE (var
))))
321 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
324 /* For &X->fld, compute object size only if fld isn't the last
325 field, as struct { int i; char c[1]; } is often used instead
326 of flexible array member. */
/* NOTE(review): `v` is initialized on a missing line -- presumably to
   TREE_OPERAND (ptr, 0); confirm against the full file.  */
327 while (v
&& v
!= pt_var
)
328 switch (TREE_CODE (v
))
/* ARRAY_REF arm: acceptable only when the index is a constant
   strictly below the array domain's maximum.  */
331 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
332 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
335 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
337 && TYPE_MAX_VALUE (domain
)
338 && TREE_CODE (TYPE_MAX_VALUE (domain
))
340 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
341 TYPE_MAX_VALUE (domain
)))
347 v
= TREE_OPERAND (v
, 0);
354 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
/* Skip COMPONENT_REFs whose base is not a record/union (per the
   visible TREE_CODE checks on the operand's type).  */
359 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
360 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
362 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
366 v
= TREE_OPERAND (v
, 0);
367 if (TREE_CODE (v
) == COMPONENT_REF
368 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
/* Check whether the addressed field is followed by another
   FIELD_DECL, i.e. is not the struct's last field.  */
371 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
372 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
373 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
381 v
= TREE_OPERAND (v
, 0);
383 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
384 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
386 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
390 v
= TREE_OPERAND (v
, 0);
/* Subobject size is the chosen VAR's type size; whole-object falls
   back to pt_var_size.  */
408 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
409 else if (!pt_var_size
)
412 var_size
= pt_var_size
;
413 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
414 if (bytes
!= error_mark_node
)
/* Offset past the end clamps the remaining bytes to zero.  */
416 if (TREE_CODE (bytes
) == INTEGER_CST
417 && tree_int_cst_lt (var_size
, bytes
))
418 bytes
= size_zero_node
;
420 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
/* For MEM_REF bases also bound by the remaining bytes within the whole
   object, and accumulate the offset into *poff.  */
425 && TREE_CODE (pt_var
) == MEM_REF
426 && bytes
!= error_mark_node
)
428 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
429 if (bytes2
!= error_mark_node
)
431 if (TREE_CODE (bytes2
) == INTEGER_CST
432 && tree_int_cst_lt (pt_var_size
, bytes2
))
433 bytes2
= size_zero_node
;
435 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
436 *poff
= size_binop (PLUS_EXPR
, *poff
, bytes2
);
437 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
441 else if (!pt_var_size
)
/* Success path: publish the computed size (the `return true` lines are
   missing from this view).  */
446 if (tree_fits_uhwi_p (bytes
))
448 *psize
= tree_to_uhwi (bytes
);
456 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
457 Handles calls to functions declared with attribute alloc_size.
458 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
459 If unknown, return unknown[object_size_type]. */
/* NOTE(review): fragmentary listing -- the declaration of `calltype`, some
   guards (e.g. the arg2 >= 0 test implied by line 491's context) and braces
   are missing from this view.  */
461 static unsigned HOST_WIDE_INT
462 alloc_object_size (const gcall
*call
, int object_size_type
)
464 gcc_assert (is_gimple_call (call
));
/* Prefer the decl's type so its attributes are seen; otherwise use the
   call's fntype.  */
467 if (tree callfn
= gimple_call_fndecl (call
))
468 calltype
= TREE_TYPE (callfn
);
470 calltype
= gimple_call_fntype (call
);
473 return unknown
[object_size_type
];
475 /* Set to positions of alloc_size arguments. */
476 int arg1
= -1, arg2
= -1;
477 tree alloc_size
= lookup_attribute ("alloc_size",
478 TYPE_ATTRIBUTES (calltype
));
479 if (alloc_size
&& TREE_VALUE (alloc_size
))
481 tree p
= TREE_VALUE (alloc_size
);
/* Attribute positions are 1-based; convert to 0-based arg indices.  */
483 arg1
= TREE_INT_CST_LOW (TREE_VALUE (p
))-1;
485 arg2
= TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p
)))-1;
/* Bail out unless the size argument(s) are in-range INTEGER_CSTs.  */
488 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
489 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
491 && (arg2
>= (int)gimple_call_num_args (call
)
492 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
493 return unknown
[object_size_type
];
495 tree bytes
= NULL_TREE
;
/* Two-argument form (calloc-like): product of both size arguments.  */
497 bytes
= size_binop (MULT_EXPR
,
498 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
499 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
/* One-argument form (malloc-like).  */
501 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
503 if (bytes
&& tree_fits_uhwi_p (bytes
))
504 return tree_to_uhwi (bytes
);
506 return unknown
[object_size_type
];
510 /* If object size is propagated from one of function's arguments directly
511 to its return value, return that argument for GIMPLE_CALL statement CALL.
512 Otherwise return NULL. */
/* NOTE(review): the trailing `return NULL_TREE;` is missing from this
   fragmentary listing.  */
515 pass_through_call (const gcall
*call
)
517 unsigned rf
= gimple_call_return_flags (call
);
/* ERF_RETURNS_ARG encodes which argument is returned verbatim.  */
518 if (rf
& ERF_RETURNS_ARG
)
520 unsigned argnum
= rf
& ERF_RETURN_ARG_MASK
;
521 if (argnum
< gimple_call_num_args (call
))
522 return gimple_call_arg (call
, argnum
);
525 /* __builtin_assume_aligned is intentionally not marked RET1. */
526 if (gimple_call_builtin_p (call
, BUILT_IN_ASSUME_ALIGNED
))
527 return gimple_call_arg (call
, 0);
533 /* Compute __builtin_object_size value for PTR and set *PSIZE to
534 the resulting value. If the declared object is known and PDECL
535 is nonnull, sets *PDECL to the object's DECL. OBJECT_SIZE_TYPE
536 is the second argument to __builtin_object_size.
537 Returns true on success and false when the object size could not
/* NOTE(review): fragmentary listing -- many braces, early returns and loop
   headers (including the pass-2 iteration driving the "keep recomputing"
   comment) are missing from this view; comments describe visible lines only.
   Public entry point: handles ADDR_EXPR directly, otherwise runs the two-pass
   SSA dataflow and caches results in object_sizes/computed.  */
541 compute_builtin_object_size (tree ptr
, int object_size_type
,
542 unsigned HOST_WIDE_INT
*psize
,
543 tree
*pdecl
/* = NULL */, tree
*poff
/* = NULL */)
545 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
/* Fallbacks so pdecl/poff can be assumed non-null (redirection lines are
   missing from this view).  */
547 tree dummy_decl
, dummy_off
= size_zero_node
;
553 /* Set to unknown and overwrite just before returning if the size
554 could be determined. */
555 *psize
= unknown
[object_size_type
];
558 init_offset_limit ();
560 if (TREE_CODE (ptr
) == ADDR_EXPR
)
561 return addr_object_size (NULL
, ptr
, object_size_type
, psize
, pdecl
, poff
);
563 if (TREE_CODE (ptr
) != SSA_NAME
564 || !POINTER_TYPE_P (TREE_TYPE (ptr
)))
/* computed[] is allocated by init_object_sizes; NULL means the pass
   infrastructure has not been initialized.  */
567 if (computed
[object_size_type
] == NULL
)
569 if (optimize
|| object_size_type
& 1)
572 /* When not optimizing, rather than failing, make a small effort
573 to determine the object size without the full benefit of
574 the (costly) computation below. */
575 gimple
*def
= SSA_NAME_DEF_STMT (ptr
);
576 if (gimple_code (def
) == GIMPLE_ASSIGN
)
578 tree_code code
= gimple_assign_rhs_code (def
);
579 if (code
== POINTER_PLUS_EXPR
)
581 tree offset
= gimple_assign_rhs2 (def
);
582 ptr
= gimple_assign_rhs1 (def
);
584 if (tree_fits_shwi_p (offset
)
585 && compute_builtin_object_size (ptr
, object_size_type
,
588 /* Return zero when the offset is out of bounds. */
589 unsigned HOST_WIDE_INT off
= tree_to_shwi (offset
);
590 *psize
= off
< *psize
? *psize
- off
: 0;
/* Full computation, done once per SSA name and cached.  */
599 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
601 struct object_size_info osi
;
605 if (num_ssa_names
> object_sizes
[object_size_type
].length ())
606 object_sizes
[object_size_type
].safe_grow (num_ssa_names
);
609 fprintf (dump_file
, "Computing %s %sobject size for ",
610 (object_size_type
& 2) ? "minimum" : "maximum",
611 (object_size_type
& 1) ? "sub" : "");
612 print_generic_expr (dump_file
, ptr
, dump_flags
);
613 fprintf (dump_file
, ":\n");
616 osi
.visited
= BITMAP_ALLOC (NULL
);
617 osi
.reexamine
= BITMAP_ALLOC (NULL
);
618 osi
.object_size_type
= object_size_type
;
623 /* First pass: walk UD chains, compute object sizes that
624 can be computed. osi.reexamine bitmap at the end will
625 contain what variables were found in dependency cycles
626 and therefore need to be reexamined. */
629 collect_object_sizes_for (&osi
, ptr
);
631 /* Second pass: keep recomputing object sizes of variables
632 that need reexamination, until no object sizes are
633 increased or all object sizes are computed. */
634 if (! bitmap_empty_p (osi
.reexamine
))
636 bitmap reexamine
= BITMAP_ALLOC (NULL
);
638 /* If looking for minimum instead of maximum object size,
639 detect cases where a pointer is increased in a loop.
640 Although even without this detection pass 2 would eventually
641 terminate, it could take a long time. If a pointer is
642 increasing this way, we need to assume 0 object size.
643 E.g. p = &buf[0]; while (cond) p = p + 4; */
644 if (object_size_type
& 2)
646 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
647 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
650 /* collect_object_sizes_for is changing
651 osi.reexamine bitmap, so iterate over a copy. */
652 bitmap_copy (reexamine
, osi
.reexamine
);
653 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
654 if (bitmap_bit_p (osi
.reexamine
, i
))
655 check_for_plus_in_loops (&osi
, ssa_name (i
));
668 /* collect_object_sizes_for is changing
669 osi.reexamine bitmap, so iterate over a copy. */
670 bitmap_copy (reexamine
, osi
.reexamine
);
671 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
672 if (bitmap_bit_p (osi
.reexamine
, i
))
674 collect_object_sizes_for (&osi
, ssa_name (i
));
675 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
677 fprintf (dump_file
, "Reexamining ");
678 print_generic_expr (dump_file
, ssa_name (i
),
680 fprintf (dump_file
, "\n");
686 BITMAP_FREE (reexamine
);
/* Anything still marked for reexamination keeps its current (cyclic)
   value and is declared computed.  */
688 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
689 bitmap_set_bit (computed
[object_size_type
], i
);
691 /* Debugging dumps. */
694 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
695 if (object_sizes
[object_size_type
][i
]
696 != unknown
[object_size_type
])
698 print_generic_expr (dump_file
, ssa_name (i
),
701 ": %s %sobject size "
702 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
703 (object_size_type
& 2) ? "minimum" : "maximum",
704 (object_size_type
& 1) ? "sub" : "",
705 object_sizes
[object_size_type
][i
]);
709 BITMAP_FREE (osi
.reexamine
);
710 BITMAP_FREE (osi
.visited
);
713 *psize
= object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
714 return *psize
!= unknown
[object_size_type
];
717 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
/* NOTE(review): fragmentary listing -- the else keywords pairing the two
   update branches are missing from this view.  Max modes (bit 2 clear) keep
   the largest candidate size, min modes keep the smallest.  */
720 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
722 int object_size_type
= osi
->object_size_type
;
723 unsigned int varno
= SSA_NAME_VERSION (ptr
);
724 unsigned HOST_WIDE_INT bytes
;
726 gcc_assert (object_sizes
[object_size_type
][varno
]
727 != unknown
[object_size_type
]);
728 gcc_assert (osi
->pass
== 0);
730 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
731 value
= TREE_OPERAND (value
, 0);
733 /* Pointer variables should have been handled by merge_object_sizes. */
734 gcc_assert (TREE_CODE (value
) != SSA_NAME
735 || !POINTER_TYPE_P (TREE_TYPE (value
)));
737 if (TREE_CODE (value
) == ADDR_EXPR
)
738 addr_object_size (osi
, value
, object_size_type
, &bytes
);
740 bytes
= unknown
[object_size_type
];
742 if ((object_size_type
& 2) == 0)
744 if (object_sizes
[object_size_type
][varno
] < bytes
)
745 object_sizes
[object_size_type
][varno
] = bytes
;
749 if (object_sizes
[object_size_type
][varno
] > bytes
)
750 object_sizes
[object_size_type
][varno
] = bytes
;
755 /* Compute object_sizes for PTR, defined to the result of a call. */
/* NOTE(review): same max/min merge pattern as expr_object_size, with the
   candidate size taken from alloc_object_size (attribute alloc_size).  */
758 call_object_size (struct object_size_info
*osi
, tree ptr
, gcall
*call
)
760 int object_size_type
= osi
->object_size_type
;
761 unsigned int varno
= SSA_NAME_VERSION (ptr
);
762 unsigned HOST_WIDE_INT bytes
;
764 gcc_assert (is_gimple_call (call
));
766 gcc_assert (object_sizes
[object_size_type
][varno
]
767 != unknown
[object_size_type
]);
768 gcc_assert (osi
->pass
== 0);
770 bytes
= alloc_object_size (call
, object_size_type
);
772 if ((object_size_type
& 2) == 0)
774 if (object_sizes
[object_size_type
][varno
] < bytes
)
775 object_sizes
[object_size_type
][varno
] = bytes
;
779 if (object_sizes
[object_size_type
][varno
] > bytes
)
780 object_sizes
[object_size_type
][varno
] = bytes
;
785 /* Compute object_sizes for PTR, defined to an unknown value. */
/* NOTE(review): merges the sentinel unknown[] value with the same max/min
   pattern as the other *_object_size helpers.  */
788 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
790 int object_size_type
= osi
->object_size_type
;
791 unsigned int varno
= SSA_NAME_VERSION (ptr
);
792 unsigned HOST_WIDE_INT bytes
;
794 gcc_assert (object_sizes
[object_size_type
][varno
]
795 != unknown
[object_size_type
]);
796 gcc_assert (osi
->pass
== 0);
798 bytes
= unknown
[object_size_type
];
800 if ((object_size_type
& 2) == 0)
802 if (object_sizes
[object_size_type
][varno
] < bytes
)
803 object_sizes
[object_size_type
][varno
] = bytes
;
807 if (object_sizes
[object_size_type
][varno
] > bytes
)
808 object_sizes
[object_size_type
][varno
] = bytes
;
813 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
814 the object size might need reexamination later. */
/* NOTE(review): fragmentary listing -- early returns, the pass-0 guard
   around collect_object_sizes_for, and some braces are missing from this
   view.  */
817 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
818 unsigned HOST_WIDE_INT offset
)
820 int object_size_type
= osi
->object_size_type
;
821 unsigned int varno
= SSA_NAME_VERSION (dest
);
822 unsigned HOST_WIDE_INT orig_bytes
;
/* Already saturated at unknown -- nothing can change it.  */
824 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
/* Offsets at or beyond offset_limit are treated as subtraction, i.e.
   the result becomes unknown.  */
826 if (offset
>= offset_limit
)
828 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
833 collect_object_sizes_for (osi
, orig
);
835 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
/* Subtract the offset, clamping at zero.  */
836 if (orig_bytes
!= unknown
[object_size_type
])
837 orig_bytes
= (offset
> orig_bytes
)
838 ? HOST_WIDE_INT_0U
: orig_bytes
- offset
;
840 if ((object_size_type
& 2) == 0)
842 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
844 object_sizes
[object_size_type
][varno
] = orig_bytes
;
850 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
852 object_sizes
[object_size_type
][varno
] = orig_bytes
;
/* DEST needs reexamination iff ORIG's own value is still provisional.  */
856 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
860 /* Compute object_sizes for VAR, defined to the result of an assignment
861 with operator POINTER_PLUS_EXPR. Return true if the object size might
862 need reexamination later. */
/* NOTE(review): fragmentary listing -- declarations of op0/op1, the
   bytes-adjustment arms between the visible else-if conditions, early
   returns, and some braces are missing from this view.  */
865 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
867 int object_size_type
= osi
->object_size_type
;
868 unsigned int varno
= SSA_NAME_VERSION (var
);
869 unsigned HOST_WIDE_INT bytes
;
/* Two accepted shapes: ptr p+ off, or &MEM_REF[base, off].  */
872 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
874 op0
= gimple_assign_rhs1 (stmt
);
875 op1
= gimple_assign_rhs2 (stmt
);
877 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
879 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
880 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
881 op0
= TREE_OPERAND (rhs
, 0);
882 op1
= TREE_OPERAND (rhs
, 1);
887 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
890 /* Handle PTR + OFFSET here. */
891 if (TREE_CODE (op1
) == INTEGER_CST
892 && (TREE_CODE (op0
) == SSA_NAME
893 || TREE_CODE (op0
) == ADDR_EXPR
))
895 if (! tree_fits_uhwi_p (op1
))
896 bytes
= unknown
[object_size_type
];
/* SSA base: delegate the offset-adjusted merge.  */
897 else if (TREE_CODE (op0
) == SSA_NAME
)
898 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
901 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
903 /* op0 will be ADDR_EXPR here. */
904 addr_object_size (osi
, op0
, object_size_type
, &bytes
);
905 if (bytes
== unknown
[object_size_type
])
/* Offsets beyond offset_limit count as subtraction -> unknown.  */
907 else if (off
> offset_limit
)
908 bytes
= unknown
[object_size_type
];
909 else if (off
> bytes
)
916 bytes
= unknown
[object_size_type
];
918 if ((object_size_type
& 2) == 0)
920 if (object_sizes
[object_size_type
][varno
] < bytes
)
921 object_sizes
[object_size_type
][varno
] = bytes
;
925 if (object_sizes
[object_size_type
][varno
] > bytes
)
926 object_sizes
[object_size_type
][varno
] = bytes
;
932 /* Compute object_sizes for VAR, defined at STMT, which is
933 a COND_EXPR. Return true if the object size might need reexamination
/* NOTE(review): fragmentary listing -- declarations of then_/else_, early
   returns and the final `return reexamine;` are missing from this view.
   Merges both COND_EXPR arms into VAR's size.  */
937 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple
*stmt
)
940 int object_size_type
= osi
->object_size_type
;
941 unsigned int varno
= SSA_NAME_VERSION (var
);
942 bool reexamine
= false;
944 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
/* Already saturated -- nothing more to merge.  */
946 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
949 then_
= gimple_assign_rhs2 (stmt
);
950 else_
= gimple_assign_rhs3 (stmt
);
952 if (TREE_CODE (then_
) == SSA_NAME
)
953 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
955 expr_object_size (osi
, var
, then_
);
/* Re-check saturation before processing the else arm.  */
957 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
960 if (TREE_CODE (else_
) == SSA_NAME
)
961 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
963 expr_object_size (osi
, var
, else_
);
968 /* Compute object sizes for VAR.
969 For ADDR_EXPR an object size is the number of remaining bytes
970 to the end of the object (where what is considered an object depends on
971 OSI->object_size_type).
972 For allocation GIMPLE_CALL like malloc or calloc object size is the size
974 For POINTER_PLUS_EXPR where second operand is a constant integer,
975 object size is object size of the first operand minus the constant.
976 If the constant is bigger than the number of remaining bytes until the
977 end of the object, object size is 0, but if it is instead a pointer
978 subtraction, object size is unknown[object_size_type].
979 To differentiate addition from subtraction, ADDR_EXPR returns
980 unknown[object_size_type] for all objects bigger than half of the address
981 space, and constants less than half of the address space are considered
982 addition, while bigger constants subtraction.
983 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
984 object size is object size of that argument.
985 Otherwise, object size is the maximum of object sizes of variables
986 that it might be set to. */
/* NOTE(review): fragmentary listing -- switch case labels (GIMPLE_ASSIGN /
   GIMPLE_CALL / GIMPLE_ASM / GIMPLE_NOP / GIMPLE_PHI are implied by the
   visible bodies), several early returns, braces and the pass-dependent
   visited/reexamine logic's full conditions are missing from this view.  */
989 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
991 int object_size_type
= osi
->object_size_type
;
992 unsigned int varno
= SSA_NAME_VERSION (var
);
/* Already fully computed -- nothing to do.  */
996 if (bitmap_bit_p (computed
[object_size_type
], varno
))
/* First visit: seed with the identity element of the merge (-1 for
   minimum modes, 0 for maximum modes).  */
1001 if (bitmap_set_bit (osi
->visited
, varno
))
1003 object_sizes
[object_size_type
][varno
]
1004 = (object_size_type
& 2) ? -1 : 0;
1008 /* Found a dependency loop. Mark the variable for later
1010 bitmap_set_bit (osi
->reexamine
, varno
);
1011 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1013 fprintf (dump_file
, "Found a dependency loop at ");
1014 print_generic_expr (dump_file
, var
, dump_flags
);
1015 fprintf (dump_file
, "\n");
1021 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1023 fprintf (dump_file
, "Visiting use-def links for ");
1024 print_generic_expr (dump_file
, var
, dump_flags
);
1025 fprintf (dump_file
, "\n");
1028 stmt
= SSA_NAME_DEF_STMT (var
);
/* Dispatch on the defining statement.  */
1031 switch (gimple_code (stmt
))
1035 tree rhs
= gimple_assign_rhs1 (stmt
);
1036 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
1037 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
1038 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
1039 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
1040 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
1041 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
1042 else if (gimple_assign_single_p (stmt
)
1043 || gimple_assign_unary_nop_p (stmt
))
1045 if (TREE_CODE (rhs
) == SSA_NAME
1046 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
1047 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
1049 expr_object_size (osi
, var
, rhs
);
1052 unknown_object_size (osi
, var
);
/* Call case: either a pass-through (memcpy-like) argument or an
   allocation sized via attribute alloc_size.  */
1058 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1059 tree arg
= pass_through_call (call_stmt
);
1062 if (TREE_CODE (arg
) == SSA_NAME
1063 && POINTER_TYPE_P (TREE_TYPE (arg
)))
1064 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
1066 expr_object_size (osi
, var
, arg
);
1069 call_object_size (osi
, var
, call_stmt
);
1074 /* Pointers defined by __asm__ statements can point anywhere. */
1075 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
/* GIMPLE_NOP: default definitions of PARM_DECLs get the parameter's
   size; other undefined names are unknown.  */
1079 if (SSA_NAME_VAR (var
)
1080 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
1081 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
1083 /* Uninitialized SSA names point nowhere. */
1084 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
/* PHI: merge every incoming argument.  */
1091 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1093 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1095 if (object_sizes
[object_size_type
][varno
]
1096 == unknown
[object_size_type
])
1099 if (TREE_CODE (rhs
) == SSA_NAME
)
1100 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1101 else if (osi
->pass
== 0)
1102 expr_object_size (osi
, var
, rhs
);
/* Finalize: either commit the value as computed, or queue it for
   reexamination (the full condition is missing from this view).  */
1112 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1114 bitmap_set_bit (computed
[object_size_type
], varno
);
1115 bitmap_clear_bit (osi
->reexamine
, varno
);
1119 bitmap_set_bit (osi
->reexamine
, varno
);
1120 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1122 fprintf (dump_file
, "Need to reexamine ");
1123 print_generic_expr (dump_file
, var
, dump_flags
);
1124 fprintf (dump_file
, "\n");
1130 /* Helper function for check_for_plus_in_loops. Called recursively
/* NOTE(review): fragmentary listing -- the `depth` parameter declaration,
   switch case labels, the declaration of `sp`, and several braces/returns
   are missing from this view.  Performs a depth-tagged DFS over use-def
   chains; revisiting a node at a different depth proves a loop containing a
   non-zero pointer increment, and every name on the explicit stack between
   the two visits gets object size 0 (minimum-mode assumption).  */
1134 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1137 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1138 unsigned int varno
= SSA_NAME_VERSION (var
);
/* Non-zero depth marks a node currently on the DFS path.  */
1140 if (osi
->depths
[varno
])
1142 if (osi
->depths
[varno
] != depth
)
1146 /* Found a loop involving pointer addition. */
/* Unwind the explicit stack, forcing size 0 for each member.  */
1147 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1150 bitmap_clear_bit (osi
->reexamine
, *sp
);
1151 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1152 object_sizes
[osi
->object_size_type
][*sp
] = 0;
/* Names not queued for reexamination cannot be part of a cycle of
   interest.  */
1159 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
/* Push VAR onto the DFS path.  */
1162 osi
->depths
[varno
] = depth
;
1163 *osi
->tos
++ = varno
;
1165 switch (gimple_code (stmt
))
/* Plain copies keep the same depth.  */
1170 if ((gimple_assign_single_p (stmt
)
1171 || gimple_assign_unary_nop_p (stmt
))
1172 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1174 tree rhs
= gimple_assign_rhs1 (stmt
);
1176 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
1178 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1180 tree basevar
= gimple_assign_rhs1 (stmt
);
1181 tree cst
= gimple_assign_rhs2 (stmt
);
1183 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* A non-zero increment bumps the depth, which is what detects the
   loop above.  */
1185 check_for_plus_in_loops_1 (osi
, basevar
,
1186 depth
+ !integer_zerop (cst
));
/* Pass-through calls propagate at the same depth.  */
1195 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
1196 tree arg
= pass_through_call (call_stmt
);
1199 if (TREE_CODE (arg
) == SSA_NAME
)
1200 check_for_plus_in_loops_1 (osi
, arg
, depth
);
/* PHIs: recurse into every SSA argument.  */
1211 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1213 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1215 if (TREE_CODE (rhs
) == SSA_NAME
)
1216 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
/* Pop VAR from the DFS path.  */
1225 osi
->depths
[varno
] = 0;
1230 /* Check if some pointer we are computing object size of is being increased
1231 within a loop. If yes, assume all the SSA variables participating in
1232 that loop have minimum object sizes 0. */
/* NOTE(review): fragmentary listing -- the early return for a zero
   increment and surrounding braces are missing from this view.  Seeds the
   DFS in check_for_plus_in_loops_1 from a `var = base p+ cst` definition.  */
1235 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1237 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
1239 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1240 and looked for a POINTER_PLUS_EXPR in the pass-through
1241 argument, if any. In GIMPLE, however, such an expression
1242 is not a valid call operand. */
1244 if (is_gimple_assign (stmt
)
1245 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1247 tree basevar
= gimple_assign_rhs1 (stmt
);
1248 tree cst
= gimple_assign_rhs2 (stmt
);
1250 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* Zero increments cannot grow the pointer in a loop.  */
1252 if (integer_zerop (cst
))
/* Mark the base at depth 1 and probe from VAR at depth 2; restore the
   depth afterwards so other roots can reuse the arrays.  */
1255 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1256 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1257 check_for_plus_in_loops_1 (osi
, var
, 2);
1258 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
1264 /* Initialize data structures for the object size computation. */
/* NOTE(review): the early return guarding against double initialization
   (presumably testing computed[0]) is missing from this fragmentary
   listing -- confirm against the full file.  */
1267 init_object_sizes (void)
1269 int object_size_type
;
1274 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1276 object_sizes
[object_size_type
].safe_grow (num_ssa_names
);
1277 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1280 init_offset_limit ();
1284 /* Destroy data structures after the object size computation. */
/* Releases the per-type size vectors and frees the computed bitmaps,
   mirroring init_object_sizes.  */
1287 fini_object_sizes (void)
1289 int object_size_type
;
1291 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1293 object_sizes
[object_size_type
].release ();
1294 BITMAP_FREE (computed
[object_size_type
]);
1299 /* Simple pass to optimize all __builtin_object_size () builtins. */
/* NOTE(review): the pass name field (between the type and optinfo_flags
   slots) is missing from this fragmentary listing.  */
1303 const pass_data pass_data_object_sizes
=
1305 GIMPLE_PASS
, /* type */
1307 OPTGROUP_NONE
, /* optinfo_flags */
1308 TV_NONE
, /* tv_id */
1309 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1310 0, /* properties_provided */
1311 0, /* properties_destroyed */
1312 0, /* todo_flags_start */
1313 0, /* todo_flags_finish */
/* GIMPLE optimization pass wrapping the object-size computation.  The
   insert_min_max_p flag (set via set_pass_param) selects the early-pass
   behavior of emitting MIN/MAX_EXPRs instead of folding to constants.  */
1316 class pass_object_sizes
: public gimple_opt_pass
1319 pass_object_sizes (gcc::context
*ctxt
)
1320 : gimple_opt_pass (pass_data_object_sizes
, ctxt
), insert_min_max_p (false)
1323 /* opt_pass methods: */
1324 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1325 void set_pass_param (unsigned int n
, bool param
)
1327 gcc_assert (n
== 0);
1328 insert_min_max_p
= param
;
1330 virtual unsigned int execute (function
*);
1333 /* Determines whether the pass instance creates MIN/MAX_EXPRs. */
1334 bool insert_min_max_p
;
1335 }; // class pass_object_sizes
1337 /* Dummy valueize function. */
/* NOTE(review): body missing from this fragmentary listing -- presumably
   returns T unchanged, as required by gimple_fold_stmt_to_constant below.  */
1340 do_valueize (tree t
)
/* Pass body: for every __builtin_object_size call in FUN, either wrap the
   call result in a MIN/MAX_EXPR with the computed constant (insert_min_max_p
   mode, types 1 and 3 only) or fold the call away entirely, substituting the
   computed constant (or the documented -1 / 0 defaults) into all uses.
   NOTE(review): fragmentary listing -- `continue` statements, declarations
   of `result`/`code`/`g`, several braces and conditions are missing from
   this view; comments describe visible lines only.  */
1346 pass_object_sizes::execute (function
*fun
)
1349 FOR_EACH_BB_FN (bb
, fun
)
1351 gimple_stmt_iterator i
;
1352 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1355 gimple
*call
= gsi_stmt (i
);
/* Only __builtin_object_size calls are of interest.  */
1356 if (!gimple_call_builtin_p (call
, BUILT_IN_OBJECT_SIZE
))
1359 init_object_sizes ();
1361 /* If insert_min_max_p, only attempt to fold
1362 __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
1363 and rather than folding the builtin to the constant if any,
1364 create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
1365 call result and the computed constant. */
1366 if (insert_min_max_p
)
1368 tree ost
= gimple_call_arg (call
, 1);
1369 if (tree_fits_uhwi_p (ost
))
1371 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1372 tree ptr
= gimple_call_arg (call
, 0);
1373 tree lhs
= gimple_call_lhs (call
);
1374 if ((object_size_type
== 1 || object_size_type
== 3)
1375 && (TREE_CODE (ptr
) == ADDR_EXPR
1376 || TREE_CODE (ptr
) == SSA_NAME
)
1379 tree type
= TREE_TYPE (lhs
);
1380 unsigned HOST_WIDE_INT bytes
;
1381 if (compute_builtin_object_size (ptr
, object_size_type
,
1383 && wi::fits_to_tree_p (bytes
, type
))
/* Redirect the call's lhs into a fresh SSA name and define the
   original lhs as MIN (type 1) / MAX (type 3) of that name and
   the computed constant.  */
1385 tree tem
= make_ssa_name (type
);
1386 gimple_call_set_lhs (call
, tem
);
1388 = object_size_type
== 1 ? MIN_EXPR
: MAX_EXPR
;
1389 tree cst
= build_int_cstu (type
, bytes
);
1391 = gimple_build_assign (lhs
, code
, tem
, cst
);
1392 gsi_insert_after (&i
, g
, GSI_NEW_STMT
);
/* Folding mode: try to reduce the call to a constant.  */
1400 tree lhs
= gimple_call_lhs (call
);
1404 result
= gimple_fold_stmt_to_constant (call
, do_valueize
);
/* No computed size: fall back to the documented defaults, (size_t)-1
   for types 0/1 and 0 for types 2/3.  */
1407 tree ost
= gimple_call_arg (call
, 1);
1409 if (tree_fits_uhwi_p (ost
))
1411 unsigned HOST_WIDE_INT object_size_type
= tree_to_uhwi (ost
);
1413 if (object_size_type
< 2)
1414 result
= fold_convert (size_type_node
,
1415 integer_minus_one_node
);
1416 else if (object_size_type
< 4)
1417 result
= build_zero_cst (size_type_node
);
1424 gcc_assert (TREE_CODE (result
) == INTEGER_CST
);
1426 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1428 fprintf (dump_file
, "Simplified\n ");
1429 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
1430 fprintf (dump_file
, " to ");
1431 print_generic_expr (dump_file
, result
);
1432 fprintf (dump_file
, "\n");
1435 /* Propagate into all uses and fold those stmts. */
1436 if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
1437 replace_uses_by (lhs
, result
);
1439 replace_call_with_value (&i
, result
);
1443 fini_object_sizes ();
/* Factory hooked into the pass manager; allocates a fresh instance of the
   object-sizes pass for context CTXT.  */
1450 make_pass_object_sizes (gcc::context
*ctxt
)
1452 return new pass_object_sizes (ctxt
);