1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
30 #include "gimple-ssa.h"
31 #include "tree-ssanames.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
/* Per-query state for the object-size walk.  NOTE(review): this extraction
   dropped interior lines (the embedded original line numbers jump from 35 to
   38 to 42); fields used elsewhere in this file -- object_size_type, pass,
   changed and depths -- are not visible here.  Confirm against the complete
   source before editing.  */
35 struct object_size_info
/* visited: SSA names already seen in the walk; reexamine: names found in
   dependency cycles that need another pass.  */
38 bitmap visited
, reexamine
;
/* Explicit work stack (stack = base, tos = top-of-stack) used by the
   plus-in-loops detection below.  */
42 unsigned int *stack
, *tos
;
/* Sentinel "size not known" value per object_size_type: -1 (all-ones) for
   the two maximum modes (indexes 0 and 1), 0 for the two minimum modes
   (indexes 2 and 3) -- see the object_sizes[] comment below.  */
45 static const unsigned HOST_WIDE_INT unknown
[4] = { -1, -1, 0, 0 };
/* Forward declarations for the static helpers of this pass.
   NOTE(review): some prototypes appear truncated by the extraction
   (e.g. addr_object_size and check_for_plus_in_loops_1 are missing their
   trailing parameter lines) -- verify against the complete source.  */
47 static tree
compute_object_offset (const_tree
, const_tree
);
48 static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info
*,
50 static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple
, int);
51 static tree
pass_through_call (const_gimple
);
52 static void collect_object_sizes_for (struct object_size_info
*, tree
);
53 static void expr_object_size (struct object_size_info
*, tree
, tree
);
54 static bool merge_object_sizes (struct object_size_info
*, tree
, tree
,
55 unsigned HOST_WIDE_INT
);
56 static bool plus_stmt_object_size (struct object_size_info
*, tree
, gimple
);
57 static bool cond_expr_object_size (struct object_size_info
*, tree
, gimple
);
58 static unsigned int compute_object_sizes (void);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info
*, tree
);
61 static void check_for_plus_in_loops_1 (struct object_size_info
*, tree
,
64 /* object_sizes[0] is upper bound for number of bytes till the end of
66 object_sizes[1] is upper bound for number of bytes till the end of
67 the subobject (innermost array or field with address taken).
68 object_sizes[2] is lower bound for number of bytes till the end of
69 the object and object_sizes[3] lower bound for subobject. */
/* One per-SSA-version array per object_size_type; allocated lazily in
   init_object_sizes and freed in fini_object_sizes.  */
70 static unsigned HOST_WIDE_INT
*object_sizes
[4];
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed
[4];
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit
;
79 /* Initialize OFFSET_LIMIT variable. */
81 init_offset_limit (void)
/* NOTE(review): the else-branch (original lines 85+) is not visible in this
   extraction; presumably it sets a fallback limit -- confirm against the
   complete source.  */
83 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype
)))
84 offset_limit
= tree_to_uhwi (TYPE_MAX_VALUE (sizetype
));
91 /* Compute offset of EXPR within VAR. Return error_mark_node
/* NOTE(review): the switch's case labels (COMPONENT_REF, ARRAY_REF/
   ARRAY_RANGE_REF, REALPART_EXPR/IMAGPART_EXPR, MEM_REF) were dropped by
   the extraction; only the case bodies remain.  The fragments below are
   interpreted from what is visible.  */
95 compute_object_offset (const_tree expr
, const_tree var
)
/* Offsets accumulate with PLUS_EXPR; code is presumably flipped to
   MINUS_EXPR for negative array indexes (see the NEGATE_EXPR fragment
   below) -- confirm against the complete source.  */
97 enum tree_code code
= PLUS_EXPR
;
/* expr == var: offset of an object within itself is zero.  */
101 return size_zero_node
;
103 switch (TREE_CODE (expr
))
/* COMPONENT_REF-style handling: recurse on the base object, then add the
   field's byte offset (DECL_FIELD_OFFSET plus the byte part of
   DECL_FIELD_BIT_OFFSET).  */
106 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
107 if (base
== error_mark_node
)
110 t
= TREE_OPERAND (expr
, 1);
111 off
= size_binop (PLUS_EXPR
, DECL_FIELD_OFFSET (t
),
112 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t
))
/* No-op wrappers: same offset as the wrapped operand.  */
118 case VIEW_CONVERT_EXPR
:
119 case NON_LVALUE_EXPR
:
120 return compute_object_offset (TREE_OPERAND (expr
, 0), var
);
/* IMAGPART_EXPR-style handling: offset of the imaginary half is the size
   of one component (TYPE_SIZE_UNIT of the part's type).  */
123 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
124 if (base
== error_mark_node
)
127 off
= TYPE_SIZE_UNIT (TREE_TYPE (expr
));
/* ARRAY_REF-style handling: index times element size; negative constant
   indexes are negated here (with the add presumably flipped to subtract).  */
131 base
= compute_object_offset (TREE_OPERAND (expr
, 0), var
);
132 if (base
== error_mark_node
)
135 t
= TREE_OPERAND (expr
, 1);
136 if (TREE_CODE (t
) == INTEGER_CST
&& tree_int_cst_sgn (t
) < 0)
139 t
= fold_build1 (NEGATE_EXPR
, TREE_TYPE (t
), t
);
141 t
= fold_convert (sizetype
, t
);
142 off
= size_binop (MULT_EXPR
, TYPE_SIZE_UNIT (TREE_TYPE (expr
)), t
);
/* MEM_REF-style handling: the constant offset is carried in the ref
   itself.  */
146 gcc_assert (TREE_CODE (TREE_OPERAND (expr
, 0)) == ADDR_EXPR
);
147 return wide_int_to_tree (sizetype
, mem_ref_offset (expr
));
/* Anything unhandled: signal failure to the caller.  */
150 return error_mark_node
;
153 return size_binop (code
, base
, off
);
157 /* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
158 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
159 If unknown, return unknown[object_size_type]. */
/* NOTE(review): this extraction dropped many interior lines (conditions,
   braces, several else-branches); the inline notes below are interpreted
   from the visible fragments and should be confirmed against the complete
   source.  */
161 static unsigned HOST_WIDE_INT
162 addr_object_size (struct object_size_info
*osi
, const_tree ptr
,
163 int object_size_type
)
165 tree pt_var
, pt_var_size
= NULL_TREE
, var_size
, bytes
;
167 gcc_assert (TREE_CODE (ptr
) == ADDR_EXPR
);
/* Strip component references to find the base object being addressed.  */
169 pt_var
= TREE_OPERAND (ptr
, 0);
170 while (handled_component_p (pt_var
))
171 pt_var
= TREE_OPERAND (pt_var
, 0);
/* Base is a MEM_REF: size comes from the pointer it dereferences.  */
174 && TREE_CODE (pt_var
) == MEM_REF
)
176 unsigned HOST_WIDE_INT sz
;
/* Without an osi context (top-level query), for subobject queries, or for
   a non-SSA base pointer, fall back to a fresh whole-object computation.  */
178 if (!osi
|| (object_size_type
& 1) != 0
179 || TREE_CODE (TREE_OPERAND (pt_var
, 0)) != SSA_NAME
)
181 sz
= compute_builtin_object_size (TREE_OPERAND (pt_var
, 0),
182 object_size_type
& ~1);
/* Otherwise reuse/extend the in-progress SSA walk.  */
186 tree var
= TREE_OPERAND (pt_var
, 0);
188 collect_object_sizes_for (osi
, var
);
189 if (bitmap_bit_p (computed
[object_size_type
],
190 SSA_NAME_VERSION (var
)))
191 sz
= object_sizes
[object_size_type
][SSA_NAME_VERSION (var
)];
193 sz
= unknown
[object_size_type
];
/* Adjust for the MEM_REF's constant offset, using offset_int to detect
   wraparound.  */
195 if (sz
!= unknown
[object_size_type
])
197 offset_int dsz
= wi::sub (sz
, mem_ref_offset (pt_var
));
200 else if (wi::fits_uhwi_p (dsz
))
203 sz
= unknown
[object_size_type
];
206 if (sz
!= unknown
[object_size_type
] && sz
< offset_limit
)
207 pt_var_size
= size_int (sz
);
/* Base is a DECL with a usable, in-range byte size.  */
211 && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var
))
212 && (unsigned HOST_WIDE_INT
)
213 tree_to_uhwi (DECL_SIZE_UNIT (pt_var
)) < offset_limit
)
214 pt_var_size
= DECL_SIZE_UNIT (pt_var
);
/* Base is a string literal: size of its type.  */
216 && TREE_CODE (pt_var
) == STRING_CST
217 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var
))
218 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
219 && (unsigned HOST_WIDE_INT
)
220 tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var
)))
222 pt_var_size
= TYPE_SIZE_UNIT (TREE_TYPE (pt_var
));
224 return unknown
[object_size_type
];
/* PTR addressed a component inside the base object.  */
226 if (pt_var
!= TREE_OPERAND (ptr
, 0))
/* Subobject query (__builtin_object_size type 1/3): find the innermost
   array or field whose address was taken.  */
230 if (object_size_type
& 1)
232 var
= TREE_OPERAND (ptr
, 0);
235 && TREE_CODE (var
) != BIT_FIELD_REF
236 && TREE_CODE (var
) != COMPONENT_REF
237 && TREE_CODE (var
) != ARRAY_REF
238 && TREE_CODE (var
) != ARRAY_RANGE_REF
239 && TREE_CODE (var
) != REALPART_EXPR
240 && TREE_CODE (var
) != IMAGPART_EXPR
)
241 var
= TREE_OPERAND (var
, 0);
242 if (var
!= pt_var
&& TREE_CODE (var
) == ARRAY_REF
)
243 var
= TREE_OPERAND (var
, 0);
244 if (! TYPE_SIZE_UNIT (TREE_TYPE (var
))
245 || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var
)))
247 && tree_int_cst_lt (pt_var_size
,
248 TYPE_SIZE_UNIT (TREE_TYPE (var
))))
250 else if (var
!= pt_var
&& TREE_CODE (pt_var
) == MEM_REF
)
253 /* For &X->fld, compute object size only if fld isn't the last
254 field, as struct { int i; char c[1]; } is often used instead
255 of flexible array member. */
256 while (v
&& v
!= pt_var
)
257 switch (TREE_CODE (v
))
/* ARRAY_REF: acceptable only when the index is a constant strictly
   below the array domain's maximum (i.e. not the trailing element).  */
260 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v
, 0)))
261 && TREE_CODE (TREE_OPERAND (v
, 1)) == INTEGER_CST
)
264 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v
, 0)));
266 && TYPE_MAX_VALUE (domain
)
267 && TREE_CODE (TYPE_MAX_VALUE (domain
))
269 && tree_int_cst_lt (TREE_OPERAND (v
, 1),
270 TYPE_MAX_VALUE (domain
)))
276 v
= TREE_OPERAND (v
, 0);
283 if (TREE_CODE (TREE_TYPE (v
)) != ARRAY_TYPE
)
/* Walk outward through COMPONENT_REFs of unions/quals (exact condition
   lines dropped by the extraction).  */
288 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
289 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
291 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
295 v
= TREE_OPERAND (v
, 0);
/* Check whether the referenced field has a following FIELD_DECL; if it
   is the last field, treat it like a flexible array member.  */
296 if (TREE_CODE (v
) == COMPONENT_REF
297 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
300 tree fld_chain
= DECL_CHAIN (TREE_OPERAND (v
, 1));
301 for (; fld_chain
; fld_chain
= DECL_CHAIN (fld_chain
))
302 if (TREE_CODE (fld_chain
) == FIELD_DECL
)
310 v
= TREE_OPERAND (v
, 0);
312 while (v
!= pt_var
&& TREE_CODE (v
) == COMPONENT_REF
)
313 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
315 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v
, 0)))
319 v
= TREE_OPERAND (v
, 0);
/* var_size: size of the (sub)object we measure against.  */
337 var_size
= TYPE_SIZE_UNIT (TREE_TYPE (var
));
338 else if (!pt_var_size
)
339 return unknown
[object_size_type
];
341 var_size
= pt_var_size
;
/* bytes = var_size - offset-of-PTR-within-var, clamped at zero.  */
342 bytes
= compute_object_offset (TREE_OPERAND (ptr
, 0), var
);
343 if (bytes
!= error_mark_node
)
345 if (TREE_CODE (bytes
) == INTEGER_CST
346 && tree_int_cst_lt (var_size
, bytes
))
347 bytes
= size_zero_node
;
349 bytes
= size_binop (MINUS_EXPR
, var_size
, bytes
);
/* For a MEM_REF base also bound by the whole object's remaining bytes
   and take the minimum.  */
353 && TREE_CODE (pt_var
) == MEM_REF
354 && bytes
!= error_mark_node
)
356 tree bytes2
= compute_object_offset (TREE_OPERAND (ptr
, 0), pt_var
);
357 if (bytes2
!= error_mark_node
)
359 if (TREE_CODE (bytes2
) == INTEGER_CST
360 && tree_int_cst_lt (pt_var_size
, bytes2
))
361 bytes2
= size_zero_node
;
363 bytes2
= size_binop (MINUS_EXPR
, pt_var_size
, bytes2
);
364 bytes
= size_binop (MIN_EXPR
, bytes
, bytes2
);
368 else if (!pt_var_size
)
369 return unknown
[object_size_type
];
373 if (tree_fits_uhwi_p (bytes
))
374 return tree_to_uhwi (bytes
);
376 return unknown
[object_size_type
];
380 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
381 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
382 argument from __builtin_object_size. If unknown, return
383 unknown[object_size_type]. */
/* NOTE(review): several lines (declaration of alloc_size, the NULL-callee
   check, switch braces and some case bodies) were dropped by the
   extraction -- confirm against the complete source.  */
385 static unsigned HOST_WIDE_INT
386 alloc_object_size (const_gimple call
, int object_size_type
)
388 tree callee
, bytes
= NULL_TREE
;
/* arg1/arg2: zero-based indexes of the size argument(s); -1 = not set.  */
390 int arg1
= -1, arg2
= -1;
392 gcc_assert (is_gimple_call (call
));
394 callee
= gimple_call_fndecl (call
);
396 return unknown
[object_size_type
];
/* Honor the alloc_size function attribute; its values are 1-based
   argument positions, hence the -1 below.  */
398 alloc_size
= lookup_attribute ("alloc_size",
399 TYPE_ATTRIBUTES (TREE_TYPE (callee
)));
400 if (alloc_size
&& TREE_VALUE (alloc_size
))
402 tree p
= TREE_VALUE (alloc_size
);
404 arg1
= tree_to_hwi (TREE_VALUE (p
))-1;
406 arg2
= tree_to_hwi (TREE_VALUE (TREE_CHAIN (p
)))-1;
/* Built-in allocators with known size argument positions; the
   assignments to arg1/arg2 inside each case were dropped by the
   extraction.  */
409 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
410 switch (DECL_FUNCTION_CODE (callee
))
412 case BUILT_IN_CALLOC
:
415 case BUILT_IN_MALLOC
:
416 case BUILT_IN_ALLOCA
:
417 case BUILT_IN_ALLOCA_WITH_ALIGN
:
/* Size arguments must exist and be constants, else give up.  */
423 if (arg1
< 0 || arg1
>= (int)gimple_call_num_args (call
)
424 || TREE_CODE (gimple_call_arg (call
, arg1
)) != INTEGER_CST
426 && (arg2
>= (int)gimple_call_num_args (call
)
427 || TREE_CODE (gimple_call_arg (call
, arg2
)) != INTEGER_CST
)))
428 return unknown
[object_size_type
];
/* Two-argument form (calloc-like): product of both sizes.  */
431 bytes
= size_binop (MULT_EXPR
,
432 fold_convert (sizetype
, gimple_call_arg (call
, arg1
)),
433 fold_convert (sizetype
, gimple_call_arg (call
, arg2
)));
435 bytes
= fold_convert (sizetype
, gimple_call_arg (call
, arg1
));
437 if (bytes
&& tree_fits_uhwi_p (bytes
))
438 return tree_to_uhwi (bytes
);
440 return unknown
[object_size_type
];
444 /* If object size is propagated from one of function's arguments directly
445 to its return value, return that argument for GIMPLE_CALL statement CALL.
446 Otherwise return NULL. */
/* NOTE(review): the function header line, the NULL-callee guard and the
   final return NULL_TREE were dropped by the extraction.  All listed
   built-ins return their first argument, so its object size passes
   through unchanged.  */
449 pass_through_call (const_gimple call
)
451 tree callee
= gimple_call_fndecl (call
);
454 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
455 switch (DECL_FUNCTION_CODE (callee
))
457 case BUILT_IN_MEMCPY
:
458 case BUILT_IN_MEMMOVE
:
459 case BUILT_IN_MEMSET
:
460 case BUILT_IN_STRCPY
:
461 case BUILT_IN_STRNCPY
:
462 case BUILT_IN_STRCAT
:
463 case BUILT_IN_STRNCAT
:
464 case BUILT_IN_MEMCPY_CHK
:
465 case BUILT_IN_MEMMOVE_CHK
:
466 case BUILT_IN_MEMSET_CHK
:
467 case BUILT_IN_STRCPY_CHK
:
468 case BUILT_IN_STRNCPY_CHK
:
469 case BUILT_IN_STPNCPY_CHK
:
470 case BUILT_IN_STRCAT_CHK
:
471 case BUILT_IN_STRNCAT_CHK
:
472 case BUILT_IN_ASSUME_ALIGNED
:
473 if (gimple_call_num_args (call
) >= 1)
474 return gimple_call_arg (call
, 0);
484 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
485 second argument from __builtin_object_size. */
/* Entry point: drives a two-pass fixpoint over SSA use-def chains.
   NOTE(review): many lines (braces, some dump calls, the pass-2 loop
   header setting osi.pass/osi.changed, XDELETEVEC of depths/stack) were
   dropped by the extraction -- confirm against the complete source.  */
487 unsigned HOST_WIDE_INT
488 compute_builtin_object_size (tree ptr
, int object_size_type
)
490 gcc_assert (object_size_type
>= 0 && object_size_type
<= 3);
493 init_offset_limit ();
/* Trivial case: address of a known object, no SSA walk needed.  */
495 if (TREE_CODE (ptr
) == ADDR_EXPR
)
496 return addr_object_size (NULL
, ptr
, object_size_type
);
498 if (TREE_CODE (ptr
) == SSA_NAME
499 && POINTER_TYPE_P (TREE_TYPE (ptr
))
500 && object_sizes
[object_size_type
] != NULL
)
/* Only walk if this SSA name's size has not been computed yet.  */
502 if (!bitmap_bit_p (computed
[object_size_type
], SSA_NAME_VERSION (ptr
)))
504 struct object_size_info osi
;
510 fprintf (dump_file
, "Computing %s %sobject size for ",
511 (object_size_type
& 2) ? "minimum" : "maximum",
512 (object_size_type
& 1) ? "sub" : "");
513 print_generic_expr (dump_file
, ptr
, dump_flags
);
514 fprintf (dump_file
, ":\n");
517 osi
.visited
= BITMAP_ALLOC (NULL
);
518 osi
.reexamine
= BITMAP_ALLOC (NULL
);
519 osi
.object_size_type
= object_size_type
;
524 /* First pass: walk UD chains, compute object sizes that
525 can be computed. osi.reexamine bitmap at the end will
526 contain what variables were found in dependency cycles
527 and therefore need to be reexamined. */
530 collect_object_sizes_for (&osi
, ptr
);
532 /* Second pass: keep recomputing object sizes of variables
533 that need reexamination, until no object sizes are
534 increased or all object sizes are computed. */
535 if (! bitmap_empty_p (osi
.reexamine
))
537 bitmap reexamine
= BITMAP_ALLOC (NULL
);
539 /* If looking for minimum instead of maximum object size,
540 detect cases where a pointer is increased in a loop.
541 Although even without this detection pass 2 would eventually
542 terminate, it could take a long time. If a pointer is
543 increasing this way, we need to assume 0 object size.
544 E.g. p = &buf[0]; while (cond) p = p + 4; */
545 if (object_size_type
& 2)
547 osi
.depths
= XCNEWVEC (unsigned int, num_ssa_names
);
548 osi
.stack
= XNEWVEC (unsigned int, num_ssa_names
);
551 /* collect_object_sizes_for is changing
552 osi.reexamine bitmap, so iterate over a copy. */
553 bitmap_copy (reexamine
, osi
.reexamine
);
554 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
555 if (bitmap_bit_p (osi
.reexamine
, i
))
556 check_for_plus_in_loops (&osi
, ssa_name (i
));
569 /* collect_object_sizes_for is changing
570 osi.reexamine bitmap, so iterate over a copy. */
571 bitmap_copy (reexamine
, osi
.reexamine
);
572 EXECUTE_IF_SET_IN_BITMAP (reexamine
, 0, i
, bi
)
573 if (bitmap_bit_p (osi
.reexamine
, i
))
575 collect_object_sizes_for (&osi
, ssa_name (i
));
576 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
578 fprintf (dump_file
, "Reexamining ");
579 print_generic_expr (dump_file
, ssa_name (i
),
581 fprintf (dump_file
, "\n");
587 BITMAP_FREE (reexamine
);
/* Whatever is still in reexamine after the fixpoint keeps its current
   (conservative) value; mark it computed.  */
589 EXECUTE_IF_SET_IN_BITMAP (osi
.reexamine
, 0, i
, bi
)
590 bitmap_set_bit (computed
[object_size_type
], i
);
592 /* Debugging dumps. */
595 EXECUTE_IF_SET_IN_BITMAP (osi
.visited
, 0, i
, bi
)
596 if (object_sizes
[object_size_type
][i
]
597 != unknown
[object_size_type
])
599 print_generic_expr (dump_file
, ssa_name (i
),
602 ": %s %sobject size "
603 HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
604 (object_size_type
& 2) ? "minimum" : "maximum",
605 (object_size_type
& 1) ? "sub" : "",
606 object_sizes
[object_size_type
][i
]);
610 BITMAP_FREE (osi
.reexamine
);
611 BITMAP_FREE (osi
.visited
);
614 return object_sizes
[object_size_type
][SSA_NAME_VERSION (ptr
)];
617 return unknown
[object_size_type
];
620 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
/* NOTE(review): braces and the osi->changed bookkeeping visible in related
   functions were dropped by the extraction.  */
623 expr_object_size (struct object_size_info
*osi
, tree ptr
, tree value
)
625 int object_size_type
= osi
->object_size_type
;
626 unsigned int varno
= SSA_NAME_VERSION (ptr
);
627 unsigned HOST_WIDE_INT bytes
;
629 gcc_assert (object_sizes
[object_size_type
][varno
]
630 != unknown
[object_size_type
]);
631 gcc_assert (osi
->pass
== 0);
633 if (TREE_CODE (value
) == WITH_SIZE_EXPR
)
634 value
= TREE_OPERAND (value
, 0);
636 /* Pointer variables should have been handled by merge_object_sizes. */
637 gcc_assert (TREE_CODE (value
) != SSA_NAME
638 || !POINTER_TYPE_P (TREE_TYPE (value
)));
640 if (TREE_CODE (value
) == ADDR_EXPR
)
641 bytes
= addr_object_size (osi
, value
, object_size_type
);
643 bytes
= unknown
[object_size_type
];
/* Maximum modes (bit 2 clear) grow toward larger sizes; minimum modes
   shrink toward smaller ones.  */
645 if ((object_size_type
& 2) == 0)
647 if (object_sizes
[object_size_type
][varno
] < bytes
)
648 object_sizes
[object_size_type
][varno
] = bytes
;
652 if (object_sizes
[object_size_type
][varno
] > bytes
)
653 object_sizes
[object_size_type
][varno
] = bytes
;
658 /* Compute object_sizes for PTR, defined to the result of a call. */
/* Record the allocation size computed by alloc_object_size for CALL's
   result, merging with the same max/min monotonicity rule as
   expr_object_size.  */
661 call_object_size (struct object_size_info
*osi
, tree ptr
, gimple call
)
663 int object_size_type
= osi
->object_size_type
;
664 unsigned int varno
= SSA_NAME_VERSION (ptr
);
665 unsigned HOST_WIDE_INT bytes
;
667 gcc_assert (is_gimple_call (call
));
669 gcc_assert (object_sizes
[object_size_type
][varno
]
670 != unknown
[object_size_type
]);
671 gcc_assert (osi
->pass
== 0);
673 bytes
= alloc_object_size (call
, object_size_type
);
675 if ((object_size_type
& 2) == 0)
677 if (object_sizes
[object_size_type
][varno
] < bytes
)
678 object_sizes
[object_size_type
][varno
] = bytes
;
682 if (object_sizes
[object_size_type
][varno
] > bytes
)
683 object_sizes
[object_size_type
][varno
] = bytes
;
688 /* Compute object_sizes for PTR, defined to an unknown value. */
/* Merge the sentinel unknown[] value into PTR's slot using the same
   monotonicity rule as the other *_object_size helpers.  */
691 unknown_object_size (struct object_size_info
*osi
, tree ptr
)
693 int object_size_type
= osi
->object_size_type
;
694 unsigned int varno
= SSA_NAME_VERSION (ptr
);
695 unsigned HOST_WIDE_INT bytes
;
697 gcc_assert (object_sizes
[object_size_type
][varno
]
698 != unknown
[object_size_type
]);
699 gcc_assert (osi
->pass
== 0);
701 bytes
= unknown
[object_size_type
];
703 if ((object_size_type
& 2) == 0)
705 if (object_sizes
[object_size_type
][varno
] < bytes
)
706 object_sizes
[object_size_type
][varno
] = bytes
;
710 if (object_sizes
[object_size_type
][varno
] > bytes
)
711 object_sizes
[object_size_type
][varno
] = bytes
;
716 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
717 the object size might need reexamination later. */
/* NOTE(review): some guard lines (the early return after the first check,
   the osi->pass test around the recursive collect call, osi->changed
   updates) were dropped by the extraction.  */
720 merge_object_sizes (struct object_size_info
*osi
, tree dest
, tree orig
,
721 unsigned HOST_WIDE_INT offset
)
723 int object_size_type
= osi
->object_size_type
;
724 unsigned int varno
= SSA_NAME_VERSION (dest
);
725 unsigned HOST_WIDE_INT orig_bytes
;
/* DEST already pinned at unknown: nothing more to merge.  */
727 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
/* Offset beyond the addition limit is treated as a pointer subtraction,
   hence unknown.  */
729 if (offset
>= offset_limit
)
731 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
736 collect_object_sizes_for (osi
, orig
);
/* Remaining bytes after the offset, clamped at zero.  */
738 orig_bytes
= object_sizes
[object_size_type
][SSA_NAME_VERSION (orig
)];
739 if (orig_bytes
!= unknown
[object_size_type
])
740 orig_bytes
= (offset
> orig_bytes
)
741 ? (unsigned HOST_WIDE_INT
) 0 : orig_bytes
- offset
;
743 if ((object_size_type
& 2) == 0)
745 if (object_sizes
[object_size_type
][varno
] < orig_bytes
)
747 object_sizes
[object_size_type
][varno
] = orig_bytes
;
753 if (object_sizes
[object_size_type
][varno
] > orig_bytes
)
755 object_sizes
[object_size_type
][varno
] = orig_bytes
;
/* Caller must reexamine DEST iff ORIG is still in a dependency cycle.  */
759 return bitmap_bit_p (osi
->reexamine
, SSA_NAME_VERSION (orig
));
763 /* Compute object_sizes for VAR, defined to the result of an assignment
764 with operator POINTER_PLUS_EXPR. Return true if the object size might
765 need reexamination later. */
/* NOTE(review): the declarations of op0/op1, a gcc_unreachable branch,
   some early returns and the `bytes = bytes - off' arm were dropped by the
   extraction -- confirm against the complete source.  */
768 plus_stmt_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
770 int object_size_type
= osi
->object_size_type
;
771 unsigned int varno
= SSA_NAME_VERSION (var
);
772 unsigned HOST_WIDE_INT bytes
;
/* Two shapes are handled: VAR = p + off, and VAR = &MEM_REF[p, off].  */
775 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
777 op0
= gimple_assign_rhs1 (stmt
);
778 op1
= gimple_assign_rhs2 (stmt
);
780 else if (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
782 tree rhs
= TREE_OPERAND (gimple_assign_rhs1 (stmt
), 0);
783 gcc_assert (TREE_CODE (rhs
) == MEM_REF
);
784 op0
= TREE_OPERAND (rhs
, 0);
785 op1
= TREE_OPERAND (rhs
, 1);
790 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
793 /* Handle PTR + OFFSET here. */
794 if (TREE_CODE (op1
) == INTEGER_CST
795 && (TREE_CODE (op0
) == SSA_NAME
796 || TREE_CODE (op0
) == ADDR_EXPR
))
798 if (! tree_fits_uhwi_p (op1
))
799 bytes
= unknown
[object_size_type
];
800 else if (TREE_CODE (op0
) == SSA_NAME
)
/* SSA base: delegate offset subtraction to merge_object_sizes.  */
801 return merge_object_sizes (osi
, var
, op0
, tree_to_uhwi (op1
));
804 unsigned HOST_WIDE_INT off
= tree_to_uhwi (op1
);
806 /* op0 will be ADDR_EXPR here. */
807 bytes
= addr_object_size (osi
, op0
, object_size_type
);
808 if (bytes
== unknown
[object_size_type
])
/* Offset past the addition limit looks like subtraction: unknown.  */
810 else if (off
> offset_limit
)
811 bytes
= unknown
[object_size_type
];
812 else if (off
> bytes
)
819 bytes
= unknown
[object_size_type
];
821 if ((object_size_type
& 2) == 0)
823 if (object_sizes
[object_size_type
][varno
] < bytes
)
824 object_sizes
[object_size_type
][varno
] = bytes
;
828 if (object_sizes
[object_size_type
][varno
] > bytes
)
829 object_sizes
[object_size_type
][varno
] = bytes
;
835 /* Compute object_sizes for VAR, defined at STMT, which is
836 a COND_EXPR. Return true if the object size might need reexamination
/* Merge the sizes of both COND_EXPR arms into VAR; SSA-name arms go
   through merge_object_sizes, others through expr_object_size.
   NOTE(review): the declarations of then_/else_, the early return and the
   final `return reexamine;' were dropped by the extraction.  */
840 cond_expr_object_size (struct object_size_info
*osi
, tree var
, gimple stmt
)
843 int object_size_type
= osi
->object_size_type
;
844 unsigned int varno
= SSA_NAME_VERSION (var
);
845 bool reexamine
= false;
847 gcc_assert (gimple_assign_rhs_code (stmt
) == COND_EXPR
);
849 if (object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
852 then_
= gimple_assign_rhs2 (stmt
);
853 else_
= gimple_assign_rhs3 (stmt
);
855 if (TREE_CODE (then_
) == SSA_NAME
)
856 reexamine
|= merge_object_sizes (osi
, var
, then_
, 0);
858 expr_object_size (osi
, var
, then_
);
860 if (TREE_CODE (else_
) == SSA_NAME
)
861 reexamine
|= merge_object_sizes (osi
, var
, else_
, 0);
863 expr_object_size (osi
, var
, else_
);
868 /* Compute object sizes for VAR.
869 For ADDR_EXPR an object size is the number of remaining bytes
870 to the end of the object (where what is considered an object depends on
871 OSI->object_size_type).
872 For allocation GIMPLE_CALL like malloc or calloc object size is the size
874 For POINTER_PLUS_EXPR where second operand is a constant integer,
875 object size is object size of the first operand minus the constant.
876 If the constant is bigger than the number of remaining bytes until the
877 end of the object, object size is 0, but if it is instead a pointer
878 subtraction, object size is unknown[object_size_type].
879 To differentiate addition from subtraction, ADDR_EXPR returns
880 unknown[object_size_type] for all objects bigger than half of the address
881 space, and constants less than half of the address space are considered
882 addition, while bigger constants subtraction.
883 For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
884 object size is object size of that argument.
885 Otherwise, object size is the maximum of object sizes of variables
886 that it might be set to. */
/* NOTE(review): this extraction dropped many lines of this function
   (declarations of stmt/reexamine, the switch's case labels such as
   GIMPLE_ASSIGN/GIMPLE_CALL/GIMPLE_ASM/GIMPLE_NOP/GIMPLE_PHI, braces and
   several guards) -- confirm structure against the complete source.  */
889 collect_object_sizes_for (struct object_size_info
*osi
, tree var
)
891 int object_size_type
= osi
->object_size_type
;
892 unsigned int varno
= SSA_NAME_VERSION (var
);
/* Already finalized: nothing to do.  */
896 if (bitmap_bit_p (computed
[object_size_type
], varno
))
/* First visit: seed with the identity of the max/min lattice
   (0 for maximum modes, all-ones for minimum modes).  */
901 if (bitmap_set_bit (osi
->visited
, varno
))
903 object_sizes
[object_size_type
][varno
]
904 = (object_size_type
& 2) ? -1 : 0;
908 /* Found a dependency loop. Mark the variable for later
910 bitmap_set_bit (osi
->reexamine
, varno
);
911 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
913 fprintf (dump_file
, "Found a dependency loop at ");
914 print_generic_expr (dump_file
, var
, dump_flags
);
915 fprintf (dump_file
, "\n");
921 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
923 fprintf (dump_file
, "Visiting use-def links for ");
924 print_generic_expr (dump_file
, var
, dump_flags
);
925 fprintf (dump_file
, "\n");
928 stmt
= SSA_NAME_DEF_STMT (var
);
/* Dispatch on the kind of defining statement.  */
931 switch (gimple_code (stmt
))
/* Assignment: pointer-plus / &MEM_REF, COND_EXPR, plain copy, or other.  */
935 tree rhs
= gimple_assign_rhs1 (stmt
);
936 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
937 || (gimple_assign_rhs_code (stmt
) == ADDR_EXPR
938 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == MEM_REF
))
939 reexamine
= plus_stmt_object_size (osi
, var
, stmt
);
940 else if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
941 reexamine
= cond_expr_object_size (osi
, var
, stmt
);
942 else if (gimple_assign_single_p (stmt
)
943 || gimple_assign_unary_nop_p (stmt
))
945 if (TREE_CODE (rhs
) == SSA_NAME
946 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
947 reexamine
= merge_object_sizes (osi
, var
, rhs
, 0);
949 expr_object_size (osi
, var
, rhs
);
952 unknown_object_size (osi
, var
);
/* Call: either a pass-through builtin (size flows from an argument) or
   an allocation call.  */
958 tree arg
= pass_through_call (stmt
);
961 if (TREE_CODE (arg
) == SSA_NAME
962 && POINTER_TYPE_P (TREE_TYPE (arg
)))
963 reexamine
= merge_object_sizes (osi
, var
, arg
, 0);
965 expr_object_size (osi
, var
, arg
);
968 call_object_size (osi
, var
, stmt
);
973 /* Pointers defined by __asm__ statements can point anywhere. */
974 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
/* Default definition: a PARM_DECL has a usable size, otherwise the name
   is uninitialized and unknown.  */
978 if (SSA_NAME_VAR (var
)
979 && TREE_CODE (SSA_NAME_VAR (var
)) == PARM_DECL
)
980 expr_object_size (osi
, var
, SSA_NAME_VAR (var
));
982 /* Uninitialized SSA names point nowhere. */
983 object_sizes
[object_size_type
][varno
] = unknown
[object_size_type
];
/* PHI: merge sizes over all incoming arguments.  */
990 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
992 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
994 if (object_sizes
[object_size_type
][varno
]
995 == unknown
[object_size_type
])
998 if (TREE_CODE (rhs
) == SSA_NAME
)
999 reexamine
|= merge_object_sizes (osi
, var
, rhs
, 0);
1000 else if (osi
->pass
== 0)
1001 expr_object_size (osi
, var
, rhs
);
/* Finalize: a settled or unknown result is marked computed; otherwise it
   stays queued for reexamination.  */
1011 || object_sizes
[object_size_type
][varno
] == unknown
[object_size_type
])
1013 bitmap_set_bit (computed
[object_size_type
], varno
);
1014 bitmap_clear_bit (osi
->reexamine
, varno
);
1018 bitmap_set_bit (osi
->reexamine
, varno
);
1019 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1021 fprintf (dump_file
, "Need to reexamine ");
1022 print_generic_expr (dump_file
, var
, dump_flags
);
1023 fprintf (dump_file
, "\n");
1029 /* Helper function for check_for_plus_in_loops. Called recursively
/* Depth-first walk of use-def chains looking for a cycle in which DEPTH
   increased (i.e. a pointer advanced by a nonzero constant inside a loop).
   NOTE(review): the function header's trailing `unsigned int depth'
   parameter, the declaration of sp, the `--sp' step, case labels and the
   `*osi->tos++' pop were partly dropped by the extraction.  */
1033 check_for_plus_in_loops_1 (struct object_size_info
*osi
, tree var
,
1036 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1037 unsigned int varno
= SSA_NAME_VERSION (var
);
/* Already on the current DFS path.  */
1039 if (osi
->depths
[varno
])
/* Revisited at a different depth: a nonzero constant was added somewhere
   around the cycle.  */
1041 if (osi
->depths
[varno
] != depth
)
1045 /* Found a loop involving pointer addition. */
/* Force every name on the stack to minimum size 0 and mark it computed
   so pass 2 need not iterate on it.  */
1046 for (sp
= osi
->tos
; sp
> osi
->stack
; )
1049 bitmap_clear_bit (osi
->reexamine
, *sp
);
1050 bitmap_set_bit (computed
[osi
->object_size_type
], *sp
);
1051 object_sizes
[osi
->object_size_type
][*sp
] = 0;
/* Names not queued for reexamination cannot be part of the cycle.  */
1058 else if (! bitmap_bit_p (osi
->reexamine
, varno
))
/* Push VARNO onto the DFS stack at the current depth.  */
1061 osi
->depths
[varno
] = depth
;
1062 *osi
->tos
++ = varno
;
1064 switch (gimple_code (stmt
))
/* Plain copy: recurse at the same depth.  */
1069 if ((gimple_assign_single_p (stmt
)
1070 || gimple_assign_unary_nop_p (stmt
))
1071 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
1073 tree rhs
= gimple_assign_rhs1 (stmt
);
1075 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
/* Pointer plus constant: bump depth iff the constant is nonzero.  */
1077 else if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1079 tree basevar
= gimple_assign_rhs1 (stmt
);
1080 tree cst
= gimple_assign_rhs2 (stmt
);
1082 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
1084 check_for_plus_in_loops_1 (osi
, basevar
,
1085 depth
+ !integer_zerop (cst
));
/* Pass-through call: size flows from the argument; recurse into it.  */
1094 tree arg
= pass_through_call (stmt
);
1097 if (TREE_CODE (arg
) == SSA_NAME
)
1098 check_for_plus_in_loops_1 (osi
, arg
, depth
);
/* PHI: recurse into every SSA-name argument.  */
1109 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
1111 tree rhs
= gimple_phi_arg (stmt
, i
)->def
;
1113 if (TREE_CODE (rhs
) == SSA_NAME
)
1114 check_for_plus_in_loops_1 (osi
, rhs
, depth
);
/* Pop VARNO off the DFS path.  */
1123 osi
->depths
[varno
] = 0;
1128 /* Check if some pointer we are computing object size of is being increased
1129 within a loop. If yes, assume all the SSA variables participating in
1130 that loop have minimum object sizes 0. */
1133 check_for_plus_in_loops (struct object_size_info
*osi
, tree var
)
1135 gimple stmt
= SSA_NAME_DEF_STMT (var
);
1137 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1138 and looked for a POINTER_PLUS_EXPR in the pass-through
1139 argument, if any. In GIMPLE, however, such an expression
1140 is not a valid call operand. */
/* Only a definition of the form VAR = basevar + cst can start a
   pointer-increasing loop.  */
1142 if (is_gimple_assign (stmt
)
1143 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
1145 tree basevar
= gimple_assign_rhs1 (stmt
);
1146 tree cst
= gimple_assign_rhs2 (stmt
);
1148 gcc_assert (TREE_CODE (cst
) == INTEGER_CST
);
/* Adding zero cannot grow the pointer; skip.
   NOTE(review): the guarded statement after this `if' was dropped by the
   extraction (presumably a return) -- confirm against the full source.  */
1150 if (integer_zerop (cst
))
/* Seed the DFS: basevar at depth 1, then walk from VAR at depth 2 so any
   path back to basevar at a different depth flags a loop.  */
1153 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 1;
1154 *osi
->tos
++ = SSA_NAME_VERSION (basevar
);
1155 check_for_plus_in_loops_1 (osi
, var
, 2);
1156 osi
->depths
[SSA_NAME_VERSION (basevar
)] = 0;
1162 /* Initialize data structures for the object size computation. */
/* Idempotent: bails out if the arrays were already allocated.  */
1165 init_object_sizes (void)
1167 int object_size_type
;
1169 if (object_sizes
[0])
/* One per-SSA-name size array and one computed bitmap per mode 0..3.  */
1172 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1174 object_sizes
[object_size_type
] = XNEWVEC (unsigned HOST_WIDE_INT
, num_ssa_names
);
1175 computed
[object_size_type
] = BITMAP_ALLOC (NULL
);
1178 init_offset_limit ();
1182 /* Destroy data structures after the object size computation. */
/* Frees each mode's size array and bitmap; resetting object_sizes[] to
   NULL lets init_object_sizes reallocate on the next invocation.  */
1185 fini_object_sizes (void)
1187 int object_size_type
;
1189 for (object_size_type
= 0; object_size_type
<= 3; object_size_type
++)
1191 free (object_sizes
[object_size_type
]);
1192 BITMAP_FREE (computed
[object_size_type
]);
1193 object_sizes
[object_size_type
] = NULL
;
1198 /* Simple pass to optimize all __builtin_object_size () builtins. */
/* Pass driver: folds every __builtin_object_size call in the function.
   NOTE(review): the basic-block loop header (FOR_EACH_BB presumably),
   several guards, `continue' statements and the final return were dropped
   by the extraction -- confirm against the complete source.  */
1201 compute_object_sizes (void)
1206 gimple_stmt_iterator i
;
1207 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
1209 tree callee
, result
;
1210 gimple call
= gsi_stmt (i
);
/* Only calls to the __builtin_object_size builtin are interesting.  */
1212 if (gimple_code (call
) != GIMPLE_CALL
)
1215 callee
= gimple_call_fndecl (call
);
1217 || DECL_BUILT_IN_CLASS (callee
) != BUILT_IN_NORMAL
1218 || DECL_FUNCTION_CODE (callee
) != BUILT_IN_OBJECT_SIZE
)
1221 init_object_sizes ();
1222 result
= fold_call_stmt (call
, false);
/* Folding failed: substitute the documented fallback constants,
   -1 for maximum modes (type < 2) and 0 for minimum modes.  */
1225 if (gimple_call_num_args (call
) == 2
1226 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, 0))))
1228 tree ost
= gimple_call_arg (call
, 1);
1230 if (tree_fits_uhwi_p (ost
))
1232 unsigned HOST_WIDE_INT object_size_type
1233 = tree_to_uhwi (ost
);
1235 if (object_size_type
< 2)
1236 result
= fold_convert (size_type_node
,
1237 integer_minus_one_node
);
1238 else if (object_size_type
< 4)
1239 result
= build_zero_cst (size_type_node
);
1247 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1249 fprintf (dump_file
, "Simplified\n ");
1250 print_gimple_stmt (dump_file
, call
, 0, dump_flags
);
/* Replace the call with the computed constant in the IL.  */
1253 if (!update_call_from_tree (&i
, result
))
1256 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1258 fprintf (dump_file
, "to\n ");
1259 print_gimple_stmt (dump_file
, gsi_stmt (i
), 0, dump_flags
1260 fprintf (dump_file
, "\n");
1265 fini_object_sizes ();
/* Pass registration boilerplate for the objsz pass.
   NOTE(review): the pass name/opt string fields and surrounding braces of
   the pass_data initializer were dropped by the extraction.  */
1271 const pass_data pass_data_object_sizes
=
1273 GIMPLE_PASS
, /* type */
1275 OPTGROUP_NONE
, /* optinfo_flags */
1276 false, /* has_gate */
1277 true, /* has_execute */
1278 TV_NONE
, /* tv_id */
1279 ( PROP_cfg
| PROP_ssa
), /* properties_required */
1280 0, /* properties_provided */
1281 0, /* properties_destroyed */
1282 0, /* todo_flags_start */
1283 TODO_verify_ssa
, /* todo_flags_finish */
/* Thin opt_pass wrapper; execute() just delegates to the static driver.  */
1286 class pass_object_sizes
: public gimple_opt_pass
1289 pass_object_sizes (gcc::context
*ctxt
)
1290 : gimple_opt_pass (pass_data_object_sizes
, ctxt
)
1293 /* opt_pass methods: */
1294 opt_pass
* clone () { return new pass_object_sizes (m_ctxt
); }
1295 unsigned int execute () { return compute_object_sizes (); }
1297 }; // class pass_object_sizes
/* Factory used by the pass manager.  */
1302 make_pass_object_sizes (gcc::context
*ctxt
)
1304 return new pass_object_sizes (ctxt
);