1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "diagnostic-core.h"
28 #include "toplev.h"
29 #include "tree-pretty-print.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
34
/* Per-invocation state shared by the object size propagation walk.  */
struct object_size_info
{
  /* The second argument of __builtin_object_size: bit 0 selects
     subobject sizes, bit 1 selects minimum instead of maximum.  */
  int object_size_type;
  /* VISITED: SSA name versions already seen in pass 0.
     REEXAMINE: versions found in dependency cycles, to be revisited
     in pass 2.  */
  bitmap visited, reexamine;
  /* Current pass: 0 = initial collection, 1 = loop detection,
     2 = reexamination until a fixed point.  */
  int pass;
  /* Set when pass 2 improved some size, forcing another iteration.  */
  bool changed;
  /* Scratch arrays indexed by SSA name version, used only by
     check_for_plus_in_loops{,_1} to detect pointer increments in
     loops; TOS is the top-of-stack pointer into STACK.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
44
/* "Unknown" sentinel per object_size_type: (size_t) -1 for the maximum
   queries (types 0 and 1), 0 for the minimum queries (types 2 and 3).  */
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA name version.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps of SSA name versions whose object sizes have already been
   computed, one bitmap per object_size_type.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
77
78
79 /* Initialize OFFSET_LIMIT variable. */
80 static void
81 init_offset_limit (void)
82 {
83 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
84 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
85 else
86 offset_limit = -1;
87 offset_limit /= 2;
88 }
89
90
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the byte offset of the
         field within it.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* None of these change the byte offset of the operand.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* The imaginary part is one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* Fold a negative constant index into a subtraction so the
         sizetype multiplication below stays non-negative.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      /* MEM_REF [&X, off]: the offset is simply the second operand.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return TREE_OPERAND (expr, 1);

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
155
156
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      /* The base is a dereference of an SSA pointer: derive the size
         from the pointed-to object's size minus the MEM_REF offset.  */
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          /* No propagation state (or a subobject query, which uses the
             whole-object size of the base): recurse from scratch.  */
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          /* Reuse the in-progress propagation results.  */
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    /* A variable or string literal with a known, reasonable size.  */
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      /* PTR points into the middle of PT_VAR.  */
      tree var;

      if (object_size_type & 1)
        {
          /* Subobject query: find the innermost accessed component.  */
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            /* Subobject size unusable or larger than the whole object:
               fall back to the whole object.  */
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            /* Index provably within bounds: not the
                               trailing-array idiom.  */
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    /* Skip through union members.  */
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        /* Is there another FIELD_DECL after this one?  */
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus offset of PTR within it.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          /* Also bound by the remaining bytes of the whole pointed-to
             object, taking the MEM_REF offset into account.  */
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              bytes2 = size_binop (PLUS_EXPR, bytes2,
                                   TREE_OPERAND (pt_var, 1));
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
373
374
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indexes of the size argument(s); -1 means none.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* An explicit alloc_size attribute names the size argument(s)
     (1-based in the attribute, hence the -1 adjustment).  */
  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Built-in allocators have well-known size arguments.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
        /* fall through */
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  /* Two size arguments (calloc-style) multiply; one is used as-is.  */
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
435
436
437 /* If object size is propagated from one of function's arguments directly
438 to its return value, return that argument for GIMPLE_CALL statement CALL.
439 Otherwise return NULL. */
440
441 static tree
442 pass_through_call (const_gimple call)
443 {
444 tree callee = gimple_call_fndecl (call);
445
446 if (callee
447 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
448 switch (DECL_FUNCTION_CODE (callee))
449 {
450 case BUILT_IN_MEMCPY:
451 case BUILT_IN_MEMMOVE:
452 case BUILT_IN_MEMSET:
453 case BUILT_IN_STRCPY:
454 case BUILT_IN_STRNCPY:
455 case BUILT_IN_STRCAT:
456 case BUILT_IN_STRNCAT:
457 case BUILT_IN_MEMCPY_CHK:
458 case BUILT_IN_MEMMOVE_CHK:
459 case BUILT_IN_MEMSET_CHK:
460 case BUILT_IN_STRCPY_CHK:
461 case BUILT_IN_STRNCPY_CHK:
462 case BUILT_IN_STRCAT_CHK:
463 case BUILT_IN_STRNCAT_CHK:
464 if (gimple_call_num_args (call) >= 1)
465 return gimple_call_arg (call, 0);
466 break;
467 default:
468 break;
469 }
470
471 return NULL_TREE;
472 }
473
474
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Lazily initialized on the first query.  */
  if (! offset_limit)
    init_offset_limit ();

  /* A direct address needs no propagation.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          /* Whatever is still marked for reexamination has reached its
             fixed point; record it as computed.  */
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
610
611 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
612
613 static void
614 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
615 {
616 int object_size_type = osi->object_size_type;
617 unsigned int varno = SSA_NAME_VERSION (ptr);
618 unsigned HOST_WIDE_INT bytes;
619
620 gcc_assert (object_sizes[object_size_type][varno]
621 != unknown[object_size_type]);
622 gcc_assert (osi->pass == 0);
623
624 if (TREE_CODE (value) == WITH_SIZE_EXPR)
625 value = TREE_OPERAND (value, 0);
626
627 /* Pointer variables should have been handled by merge_object_sizes. */
628 gcc_assert (TREE_CODE (value) != SSA_NAME
629 || !POINTER_TYPE_P (TREE_TYPE (value)));
630
631 if (TREE_CODE (value) == ADDR_EXPR)
632 bytes = addr_object_size (osi, value, object_size_type);
633 else
634 bytes = unknown[object_size_type];
635
636 if ((object_size_type & 2) == 0)
637 {
638 if (object_sizes[object_size_type][varno] < bytes)
639 object_sizes[object_size_type][varno] = bytes;
640 }
641 else
642 {
643 if (object_sizes[object_size_type][varno] > bytes)
644 object_sizes[object_size_type][varno] = bytes;
645 }
646 }
647
648
649 /* Compute object_sizes for PTR, defined to the result of a call. */
650
651 static void
652 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
653 {
654 int object_size_type = osi->object_size_type;
655 unsigned int varno = SSA_NAME_VERSION (ptr);
656 unsigned HOST_WIDE_INT bytes;
657
658 gcc_assert (is_gimple_call (call));
659
660 gcc_assert (object_sizes[object_size_type][varno]
661 != unknown[object_size_type]);
662 gcc_assert (osi->pass == 0);
663
664 bytes = alloc_object_size (call, object_size_type);
665
666 if ((object_size_type & 2) == 0)
667 {
668 if (object_sizes[object_size_type][varno] < bytes)
669 object_sizes[object_size_type][varno] = bytes;
670 }
671 else
672 {
673 if (object_sizes[object_size_type][varno] > bytes)
674 object_sizes[object_size_type][varno] = bytes;
675 }
676 }
677
678
679 /* Compute object_sizes for PTR, defined to an unknown value. */
680
681 static void
682 unknown_object_size (struct object_size_info *osi, tree ptr)
683 {
684 int object_size_type = osi->object_size_type;
685 unsigned int varno = SSA_NAME_VERSION (ptr);
686 unsigned HOST_WIDE_INT bytes;
687
688 gcc_assert (object_sizes[object_size_type][varno]
689 != unknown[object_size_type]);
690 gcc_assert (osi->pass == 0);
691
692 bytes = unknown[object_size_type];
693
694 if ((object_size_type & 2) == 0)
695 {
696 if (object_sizes[object_size_type][varno] < bytes)
697 object_sizes[object_size_type][varno] = bytes;
698 }
699 else
700 {
701 if (object_sizes[object_size_type][varno] > bytes)
702 object_sizes[object_size_type][varno] = bytes;
703 }
704 }
705
706
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* DEST already pinned at unknown: nothing can change it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or above OFFSET_LIMIT are treated as pointer
     subtraction; the result becomes unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* ORIG's size minus OFFSET, clamped at zero (unknown stays unknown).  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Maximum queries grow DEST's estimate, minimum queries shrink it;
     record any change so pass 2 iterates again.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  /* DEST needs reexamination iff ORIG is still marked as part of a
     dependency cycle.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
752
753
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr, off] is equivalent to ptr + off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        /* SSA base: delegate to merge_object_sizes, which also reports
           whether reexamination is needed.  */
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            /* Treated as subtraction, not addition.  */
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  /* Maximum queries grow the estimate, minimum queries shrink it.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
824
825
826 /* Compute object_sizes for VAR, defined to VALUE, which is
827 a COND_EXPR. Return true if the object size might need reexamination
828 later. */
829
830 static bool
831 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
832 {
833 tree then_, else_;
834 int object_size_type = osi->object_size_type;
835 unsigned int varno = SSA_NAME_VERSION (var);
836 bool reexamine = false;
837
838 gcc_assert (TREE_CODE (value) == COND_EXPR);
839
840 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
841 return false;
842
843 then_ = COND_EXPR_THEN (value);
844 else_ = COND_EXPR_ELSE (value);
845
846 if (TREE_CODE (then_) == SSA_NAME)
847 reexamine |= merge_object_sizes (osi, var, then_, 0);
848 else
849 expr_object_size (osi, var, then_);
850
851 if (TREE_CODE (else_) == SSA_NAME)
852 reexamine |= merge_object_sizes (osi, var, else_, 0);
853 else
854 expr_object_size (osi, var, else_);
855
856 return reexamine;
857 }
858
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already finalized in an earlier query or iteration.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          /* First visit: seed with the identity of the min/max merge
             (-1 for minimum queries, 0 for maximum queries).  */
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of statement defining VAR.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        /* memcpy-like calls propagate their first argument's size;
           allocation calls are handled by call_object_size.  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        /* Default definition: a parameter or a variable's initial value.  */
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        /* Merge every PHI argument; stop early once unknown.  */
        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* Finalize VAR unless it still depends on a cycle member.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
1022
1023
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* Already on the walk stack: a cycle.  If DEPTH differs, the
         cycle contains a nonzero pointer increment, so every SSA name
         on the stack down to VAR gets minimum object size 0.  */
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names still awaiting reexamination can be in a cycle.  */
    return;

  /* Push VAR onto the walk stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            /* A plain copy: same depth.  */
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            /* A nonzero addition bumps the depth, which is what the
               cycle check above detects.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        /* Follow every SSA argument of the PHI at the same depth.  */
        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR from the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1121
1122
1123 /* Check if some pointer we are computing object size of is being increased
1124 within a loop. If yes, assume all the SSA variables participating in
1125 that loop have minimum object sizes 0. */
1126
1127 static void
1128 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1129 {
1130 gimple stmt = SSA_NAME_DEF_STMT (var);
1131
1132 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1133 and looked for a POINTER_PLUS_EXPR in the pass-through
1134 argument, if any. In GIMPLE, however, such an expression
1135 is not a valid call operand. */
1136
1137 if (is_gimple_assign (stmt)
1138 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1139 {
1140 tree basevar = gimple_assign_rhs1 (stmt);
1141 tree cst = gimple_assign_rhs2 (stmt);
1142
1143 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1144
1145 if (integer_zerop (cst))
1146 return;
1147
1148 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1149 *osi->tos++ = SSA_NAME_VERSION (basevar);
1150 check_for_plus_in_loops_1 (osi, var, 2);
1151 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1152 osi->tos--;
1153 }
1154 }
1155
1156
1157 /* Initialize data structures for the object size computation. */
1158
1159 void
1160 init_object_sizes (void)
1161 {
1162 int object_size_type;
1163
1164 if (object_sizes[0])
1165 return;
1166
1167 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1168 {
1169 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1170 computed[object_size_type] = BITMAP_ALLOC (NULL);
1171 }
1172
1173 init_offset_limit ();
1174 }
1175
1176
1177 /* Destroy data structures after the object size computation. */
1178
1179 void
1180 fini_object_sizes (void)
1181 {
1182 int object_size_type;
1183
1184 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1185 {
1186 free (object_sizes[object_size_type]);
1187 BITMAP_FREE (computed[object_size_type]);
1188 object_sizes[object_size_type] = NULL;
1189 }
1190 }
1191
1192
1193 /* Simple pass to optimize all __builtin_object_size () builtins. */
1194
static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree callee, result;
	  gimple call = gsi_stmt (i);

	  /* Only calls to the __builtin_object_size builtin are
	     interesting; skip everything else.  */
	  if (gimple_code (call) != GIMPLE_CALL)
	    continue;

	  callee = gimple_call_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
	    continue;

	  /* Allocate the per-SSA-name tables lazily, only once at least
	     one __builtin_object_size call is actually encountered.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* Folding could not determine a size.  Substitute the
		 documented failure values instead: (size_t) -1 for the
		 maximum-size modes (0 and 1), 0 for the minimum-size
		 modes (2 and 3).  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (host_integerp (ost, 1))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = fold_convert (size_type_node,
					       integer_zero_node);
		    }
		}

	      /* Malformed call or out-of-range type argument: leave the
		 statement untouched.  */
	      if (!result)
		continue;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	    }

	  /* Replace the call with the computed constant; this updates the
	     statement the iterator points at in place.  */
	  if (!update_call_from_tree (&i, result))
	    gcc_unreachable ();

	  /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
	     now handled by gsi_replace, called from update_call_from_tree.  */

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  /* Free the lazily-allocated tables (a no-op if no builtin was seen).  */
  fini_object_sizes ();
  return 0;
}
1267
/* Pass descriptor for the object size lowering pass ("objsz").  It has
   no gate (runs whenever scheduled) and requires the CFG and SSA form.  */

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};