/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

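/* Illustrative example (not part of the original sources), assuming a
   target where the struct below has no padding, so sizeof (v) == 30:

     struct A { char a[10]; char b[20]; } v;
     char *p = &v.a[4];

   object_sizes[0] and object_sizes[2] describe the whole object, so the
   size recorded for p is 30 - 4 = 26 bytes, while object_sizes[1] and
   object_sizes[3] describe the innermost subobject v.a, giving
   10 - 4 = 6 bytes.  */
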
/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

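/* Illustrative example (not part of the original sources): given

     struct S { int i; char c[10]; } s;

   the offset of the reference s.c[3] within s computed above is
   offsetof (struct S, c) + 3 * sizeof (char), i.e. typically 7 when
   int is 4 bytes wide; a negative constant ARRAY_REF index is handled
   by switching the outer operation to MINUS_EXPR.  */
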

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
          if (dsz.is_negative ())
            sz = 0;
          else if (dsz.fits_uhwi ())
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}

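/* Illustrative example (not part of the original sources) of the
   trailing-array special case handled above: given

     struct T { int i; char c[1]; };
     struct T *t = __builtin_malloc (sizeof (struct T) + 9);

   a request for the subobject size of &t->c[0] is not limited to the
   declared 1-byte array, because c is the last field of T and such
   trailing arrays are commonly used in place of flexible array
   members; the size is computed against the whole allocation instead.  */
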

/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}

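/* Illustrative example (not part of the original sources): a
   hypothetical allocator declared as

     void *my_alloc (size_t nmemb, size_t sz)
       __attribute__ ((alloc_size (1, 2)));

   makes alloc_object_size treat my_alloc (10, 4) as returning a
   40-byte object; the attribute arguments are 1-based, hence the
   "-1" adjustments above, and calloc gets the same two-argument
   treatment via the switch above.  */
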

/* If object size is propagated from one of the function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}

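/* Illustrative example (not part of the original sources): in

     char buf[10];
     char *p = __builtin_memcpy (buf, q, 4);

   memcpy returns its first argument, so pass_through_call makes the
   object size of p equal to the object size of buf (10 bytes); q and
   the copied length play no role here.  */
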

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

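/* Illustrative example (not part of the original sources) of the
   two-pass scheme above: for

     char buf[64];
     char *p = buf;
     while (cond)
       p = p + 4;

   the PHI node for p depends on its own result through the
   POINTER_PLUS_EXPR, so the first pass marks it for reexamination and
   the second pass iterates until no size changes; for the minimum
   (object_size_type & 2) variants, check_for_plus_in_loops forces the
   sizes in such a cycle to 0.  */
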
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc, the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments, the
   object size is the object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

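/* Illustrative example (not part of the original sources) of the
   addition vs. subtraction heuristic described above: on a 64-bit
   target, q = p - 4 is represented in GIMPLE as
   q = p + 0xfffffffffffffffc; that constant exceeds offset_limit
   (half of the address space), so the result is treated as unknown
   rather than as a pointer moved far past the end of its object.  */
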

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (tree_fits_uhwi_p (ost))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_to_uhwi (ost);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

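/* Illustrative example (not part of the original sources): for

     char buf[10];
     size_t n = __builtin_object_size (&buf[3], 0);

   the pass above folds the call so that n becomes 7; when nothing can
   be determined, the call is replaced by (size_t) -1 for types 0 and 1
   and by 0 for types 2 and 3, matching the fallback in
   compute_object_sizes.  */
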
namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}