gcc/tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "toplev.h"
28 #include "diagnostic.h"
29 #include "tree-pretty-print.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
34
35 struct object_size_info
36 {
37 int object_size_type;
38 bitmap visited, reexamine;
39 int pass;
40 bool changed;
41 unsigned int *depths;
42 unsigned int *stack, *tos;
43 };
44
45 static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
46
47 static tree compute_object_offset (const_tree, const_tree);
48 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
49 const_tree, int);
50 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
51 static tree pass_through_call (const_gimple);
52 static void collect_object_sizes_for (struct object_size_info *, tree);
53 static void expr_object_size (struct object_size_info *, tree, tree);
54 static bool merge_object_sizes (struct object_size_info *, tree, tree,
55 unsigned HOST_WIDE_INT);
56 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
57 static bool cond_expr_object_size (struct object_size_info *, tree, tree);
58 static unsigned int compute_object_sizes (void);
59 static void init_offset_limit (void);
60 static void check_for_plus_in_loops (struct object_size_info *, tree);
61 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
62 unsigned int);
63
64 /* object_sizes[0] is the upper bound for the number of bytes till the end
65 of the object.
66 object_sizes[1] is the upper bound for the number of bytes till the end
67 of the subobject (innermost array or field with address taken).
68 object_sizes[2] is the lower bound for the number of bytes till the end
69 of the object and object_sizes[3] is the lower bound for the subobject. */
70 static unsigned HOST_WIDE_INT *object_sizes[4];
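/* Illustrative example (a sketch with a made-up type, not from this file):
   given

     struct A { char buf[10]; char tail[5]; } a;

   and the pointer &a.buf[2], the four entries would be

     object_sizes[0]  13   bytes to the end of the whole object a
     object_sizes[1]   8   bytes to the end of the subobject buf
     object_sizes[2]  13   lower-bound variant of [0]
     object_sizes[3]   8   lower-bound variant of [1]

   (for a statically known object the lower and upper bounds coincide).  */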
71
72 /* Bitmaps what object sizes have been computed already. */
73 static bitmap computed[4];
74
75 /* Maximum value of offset we consider to be addition. */
76 static unsigned HOST_WIDE_INT offset_limit;
77
78
79 /* Initialize OFFSET_LIMIT variable. */
80 static void
81 init_offset_limit (void)
82 {
83 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
84 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
85 else
86 offset_limit = -1;
87 offset_limit /= 2;
88 }
89
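/* Illustration of the halving above (assuming a 32-bit sizetype):
   TYPE_MAX_VALUE (sizetype) is 0xffffffff, so offset_limit becomes
   0x7fffffff.  A POINTER_PLUS_EXPR offset such as (size_t) -4 is then
   >= offset_limit and is treated as a pointer subtraction (result
   unknown[object_size_type]) rather than as a huge forward offset;
   see plus_stmt_object_size and merge_object_sizes below.  */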
90
91 /* Compute offset of EXPR within VAR. Return error_mark_node
92 if unknown. */
93
94 static tree
95 compute_object_offset (const_tree expr, const_tree var)
96 {
97 enum tree_code code = PLUS_EXPR;
98 tree base, off, t;
99
100 if (expr == var)
101 return size_zero_node;
102
103 switch (TREE_CODE (expr))
104 {
105 case COMPONENT_REF:
106 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
107 if (base == error_mark_node)
108 return base;
109
110 t = TREE_OPERAND (expr, 1);
111 off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
112 size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
113 / BITS_PER_UNIT));
114 break;
115
116 case REALPART_EXPR:
117 CASE_CONVERT:
118 case VIEW_CONVERT_EXPR:
119 case NON_LVALUE_EXPR:
120 return compute_object_offset (TREE_OPERAND (expr, 0), var);
121
122 case IMAGPART_EXPR:
123 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
124 if (base == error_mark_node)
125 return base;
126
127 off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
128 break;
129
130 case ARRAY_REF:
131 base = compute_object_offset (TREE_OPERAND (expr, 0), var);
132 if (base == error_mark_node)
133 return base;
134
135 t = TREE_OPERAND (expr, 1);
136 if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
137 {
138 code = MINUS_EXPR;
139 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
140 }
141 t = fold_convert (sizetype, t);
142 off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
143 break;
144
145 default:
146 return error_mark_node;
147 }
148
149 return size_binop (code, base, off);
150 }
151
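/* Worked example (hypothetical type, assuming a 4-byte int):
     struct S { int i; char a[8]; } s;
   For EXPR == s.a[3] and VAR == s, the COMPONENT_REF case contributes
   DECL_FIELD_OFFSET + DECL_FIELD_BIT_OFFSET / BITS_PER_UNIT == 4 and the
   ARRAY_REF case adds 3 * TYPE_SIZE_UNIT (char) == 3, giving an offset
   of 7, i.e. offsetof (struct S, a) + 3.  */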
152
153 /* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
154 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
155 If unknown, return unknown[object_size_type]. */
156
157 static unsigned HOST_WIDE_INT
158 addr_object_size (struct object_size_info *osi, const_tree ptr,
159 int object_size_type)
160 {
161 tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
162
163 gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
164
165 pt_var = TREE_OPERAND (ptr, 0);
166 if (REFERENCE_CLASS_P (pt_var))
167 pt_var = get_base_address (pt_var);
168
169 if (pt_var
170 && TREE_CODE (pt_var) == INDIRECT_REF
171 && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
172 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
173 {
174 unsigned HOST_WIDE_INT sz;
175
176 if (!osi || (object_size_type & 1) != 0)
177 sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
178 object_size_type & ~1);
179 else
180 {
181 tree var = TREE_OPERAND (pt_var, 0);
182 if (osi->pass == 0)
183 collect_object_sizes_for (osi, var);
184 if (bitmap_bit_p (computed[object_size_type],
185 SSA_NAME_VERSION (var)))
186 sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
187 else
188 sz = unknown[object_size_type];
189 }
190
191 if (sz != unknown[object_size_type] && sz < offset_limit)
192 pt_var_size = size_int (sz);
193 }
194 else if (pt_var
195 && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
196 && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
197 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
198 && (unsigned HOST_WIDE_INT)
199 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
200 < offset_limit)
201 pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
202 else
203 return unknown[object_size_type];
204
205 if (pt_var != TREE_OPERAND (ptr, 0))
206 {
207 tree var;
208
209 if (object_size_type & 1)
210 {
211 var = TREE_OPERAND (ptr, 0);
212
213 while (var != pt_var
214 && TREE_CODE (var) != BIT_FIELD_REF
215 && TREE_CODE (var) != COMPONENT_REF
216 && TREE_CODE (var) != ARRAY_REF
217 && TREE_CODE (var) != ARRAY_RANGE_REF
218 && TREE_CODE (var) != REALPART_EXPR
219 && TREE_CODE (var) != IMAGPART_EXPR)
220 var = TREE_OPERAND (var, 0);
221 if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
222 var = TREE_OPERAND (var, 0);
223 if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
224 || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
225 || (pt_var_size
226 && tree_int_cst_lt (pt_var_size,
227 TYPE_SIZE_UNIT (TREE_TYPE (var)))))
228 var = pt_var;
229 else if (var != pt_var && TREE_CODE (pt_var) == INDIRECT_REF)
230 {
231 tree v = var;
232 /* For &X->fld, compute object size only if fld isn't the last
233 field, as struct { int i; char c[1]; } is often used instead
234 of flexible array member. */
235 while (v && v != pt_var)
236 switch (TREE_CODE (v))
237 {
238 case ARRAY_REF:
239 if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
240 && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
241 {
242 tree domain
243 = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
244 if (domain
245 && TYPE_MAX_VALUE (domain)
246 && TREE_CODE (TYPE_MAX_VALUE (domain))
247 == INTEGER_CST
248 && tree_int_cst_lt (TREE_OPERAND (v, 1),
249 TYPE_MAX_VALUE (domain)))
250 {
251 v = NULL_TREE;
252 break;
253 }
254 }
255 v = TREE_OPERAND (v, 0);
256 break;
257 case REALPART_EXPR:
258 case IMAGPART_EXPR:
259 v = NULL_TREE;
260 break;
261 case COMPONENT_REF:
262 if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
263 {
264 v = NULL_TREE;
265 break;
266 }
267 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
268 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
269 != UNION_TYPE
270 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
271 != QUAL_UNION_TYPE)
272 break;
273 else
274 v = TREE_OPERAND (v, 0);
275 if (TREE_CODE (v) == COMPONENT_REF
276 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
277 == RECORD_TYPE)
278 {
279 tree fld_chain = TREE_CHAIN (TREE_OPERAND (v, 1));
280 for (; fld_chain; fld_chain = TREE_CHAIN (fld_chain))
281 if (TREE_CODE (fld_chain) == FIELD_DECL)
282 break;
283
284 if (fld_chain)
285 {
286 v = NULL_TREE;
287 break;
288 }
289 v = TREE_OPERAND (v, 0);
290 }
291 while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
292 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
293 != UNION_TYPE
294 && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
295 != QUAL_UNION_TYPE)
296 break;
297 else
298 v = TREE_OPERAND (v, 0);
299 if (v != pt_var)
300 v = NULL_TREE;
301 else
302 v = pt_var;
303 break;
304 default:
305 v = pt_var;
306 break;
307 }
308 if (v == pt_var)
309 var = pt_var;
310 }
311 }
312 else
313 var = pt_var;
314
315 if (var != pt_var)
316 var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
317 else if (!pt_var_size)
318 return unknown[object_size_type];
319 else
320 var_size = pt_var_size;
321 bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
322 if (bytes != error_mark_node)
323 {
324 if (TREE_CODE (bytes) == INTEGER_CST
325 && tree_int_cst_lt (var_size, bytes))
326 bytes = size_zero_node;
327 else
328 bytes = size_binop (MINUS_EXPR, var_size, bytes);
329 }
330 if (var != pt_var
331 && pt_var_size
332 && TREE_CODE (pt_var) == INDIRECT_REF
333 && bytes != error_mark_node)
334 {
335 tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
336 if (bytes2 != error_mark_node)
337 {
338 if (TREE_CODE (bytes2) == INTEGER_CST
339 && tree_int_cst_lt (pt_var_size, bytes2))
340 bytes2 = size_zero_node;
341 else
342 bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
343 bytes = size_binop (MIN_EXPR, bytes, bytes2);
344 }
345 }
346 }
347 else if (!pt_var_size)
348 return unknown[object_size_type];
349 else
350 bytes = pt_var_size;
351
352 if (host_integerp (bytes, 1))
353 return tree_low_cst (bytes, 1);
354
355 return unknown[object_size_type];
356 }
357
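/* Example of the last-field heuristic above (sketch, made-up type):
     struct T { int len; char data[1]; } *t = malloc (100);
   For __builtin_object_size (&t->data[0], 1) the subobject size is not
   clamped to the declared 1 byte: because data is the last field the code
   falls back to the enclosing object, giving 100 - offsetof (struct T,
   data) == 96 when t is known to point to 100 malloc'd bytes.  */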
358
359 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
360 Handles various allocation calls. OBJECT_SIZE_TYPE is the second
361 argument from __builtin_object_size. If unknown, return
362 unknown[object_size_type]. */
363
364 static unsigned HOST_WIDE_INT
365 alloc_object_size (const_gimple call, int object_size_type)
366 {
367 tree callee, bytes = NULL_TREE;
368 tree alloc_size;
369 int arg1 = -1, arg2 = -1;
370
371 gcc_assert (is_gimple_call (call));
372
373 callee = gimple_call_fndecl (call);
374 if (!callee)
375 return unknown[object_size_type];
376
377 alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
378 if (alloc_size && TREE_VALUE (alloc_size))
379 {
380 tree p = TREE_VALUE (alloc_size);
381
382 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
383 if (TREE_CHAIN (p))
384 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
385 }
386
387 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
388 switch (DECL_FUNCTION_CODE (callee))
389 {
390 case BUILT_IN_CALLOC:
391 arg2 = 1;
392 /* fall through */
393 case BUILT_IN_MALLOC:
394 case BUILT_IN_ALLOCA:
395 arg1 = 0;
396 default:
397 break;
398 }
399
400 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
401 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
402 || (arg2 >= 0
403 && (arg2 >= (int)gimple_call_num_args (call)
404 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
405 return unknown[object_size_type];
406
407 if (arg2 >= 0)
408 bytes = size_binop (MULT_EXPR,
409 fold_convert (sizetype, gimple_call_arg (call, arg1)),
410 fold_convert (sizetype, gimple_call_arg (call, arg2)));
411 else if (arg1 >= 0)
412 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
413
414 if (bytes && host_integerp (bytes, 1))
415 return tree_low_cst (bytes, 1);
416
417 return unknown[object_size_type];
418 }
419
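/* Usage sketch for the alloc_size attribute handled above (the function
   names are made up, the attribute syntax is the documented GCC one):
     void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t m) __attribute__ ((alloc_size (1, 2)));
   With constant arguments, __builtin_object_size (my_alloc (32), 0)
   folds to 32 and __builtin_object_size (my_calloc (4, 8), 0) folds to
   32, matching the explicit malloc/calloc cases in the switch above.  */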
420
421 /* If the object size is propagated from one of the function's arguments
422 directly to its return value, return that argument for GIMPLE_CALL
423 statement CALL.  Otherwise return NULL. */
424
425 static tree
426 pass_through_call (const_gimple call)
427 {
428 tree callee = gimple_call_fndecl (call);
429
430 if (callee
431 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
432 switch (DECL_FUNCTION_CODE (callee))
433 {
434 case BUILT_IN_MEMCPY:
435 case BUILT_IN_MEMMOVE:
436 case BUILT_IN_MEMSET:
437 case BUILT_IN_STRCPY:
438 case BUILT_IN_STRNCPY:
439 case BUILT_IN_STRCAT:
440 case BUILT_IN_STRNCAT:
441 case BUILT_IN_MEMCPY_CHK:
442 case BUILT_IN_MEMMOVE_CHK:
443 case BUILT_IN_MEMSET_CHK:
444 case BUILT_IN_STRCPY_CHK:
445 case BUILT_IN_STRNCPY_CHK:
446 case BUILT_IN_STRCAT_CHK:
447 case BUILT_IN_STRNCAT_CHK:
448 if (gimple_call_num_args (call) >= 1)
449 return gimple_call_arg (call, 0);
450 break;
451 default:
452 break;
453 }
454
455 return NULL_TREE;
456 }
457
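/* Example of a pass-through call (sketch):
     char buf[64];
     char *p = memcpy (buf, src, n);
   memcpy returns its first argument, so the object size of p is taken
   from buf and __builtin_object_size (p, 0) evaluates to 64.  */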
458
459 /* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
460 second argument from __builtin_object_size. */
461
462 unsigned HOST_WIDE_INT
463 compute_builtin_object_size (tree ptr, int object_size_type)
464 {
465 gcc_assert (object_size_type >= 0 && object_size_type <= 3);
466
467 if (! offset_limit)
468 init_offset_limit ();
469
470 if (TREE_CODE (ptr) == ADDR_EXPR)
471 return addr_object_size (NULL, ptr, object_size_type);
472
473 if (TREE_CODE (ptr) == SSA_NAME
474 && POINTER_TYPE_P (TREE_TYPE (ptr))
475 && object_sizes[object_size_type] != NULL)
476 {
477 if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
478 {
479 struct object_size_info osi;
480 bitmap_iterator bi;
481 unsigned int i;
482
483 if (dump_file)
484 {
485 fprintf (dump_file, "Computing %s %sobject size for ",
486 (object_size_type & 2) ? "minimum" : "maximum",
487 (object_size_type & 1) ? "sub" : "");
488 print_generic_expr (dump_file, ptr, dump_flags);
489 fprintf (dump_file, ":\n");
490 }
491
492 osi.visited = BITMAP_ALLOC (NULL);
493 osi.reexamine = BITMAP_ALLOC (NULL);
494 osi.object_size_type = object_size_type;
495 osi.depths = NULL;
496 osi.stack = NULL;
497 osi.tos = NULL;
498
499 /* First pass: walk UD chains, compute object sizes that
500 can be computed.  At the end the osi.reexamine bitmap will
501 contain the variables that were found in dependency cycles
502 and therefore need to be reexamined. */
503 osi.pass = 0;
504 osi.changed = false;
505 collect_object_sizes_for (&osi, ptr);
506
507 /* Second pass: keep recomputing object sizes of variables
508 that need reexamination, until no object sizes are
509 increased or all object sizes are computed. */
510 if (! bitmap_empty_p (osi.reexamine))
511 {
512 bitmap reexamine = BITMAP_ALLOC (NULL);
513
514 /* If looking for minimum instead of maximum object size,
515 detect cases where a pointer is increased in a loop.
516 Although even without this detection pass 2 would eventually
517 terminate, it could take a long time. If a pointer is
518 increasing this way, we need to assume 0 object size.
519 E.g. p = &buf[0]; while (cond) p = p + 4; */
520 if (object_size_type & 2)
521 {
522 osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
523 osi.stack = XNEWVEC (unsigned int, num_ssa_names);
524 osi.tos = osi.stack;
525 osi.pass = 1;
526 /* collect_object_sizes_for is changing
527 osi.reexamine bitmap, so iterate over a copy. */
528 bitmap_copy (reexamine, osi.reexamine);
529 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
530 if (bitmap_bit_p (osi.reexamine, i))
531 check_for_plus_in_loops (&osi, ssa_name (i));
532
533 free (osi.depths);
534 osi.depths = NULL;
535 free (osi.stack);
536 osi.stack = NULL;
537 osi.tos = NULL;
538 }
539
540 do
541 {
542 osi.pass = 2;
543 osi.changed = false;
544 /* collect_object_sizes_for is changing
545 osi.reexamine bitmap, so iterate over a copy. */
546 bitmap_copy (reexamine, osi.reexamine);
547 EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
548 if (bitmap_bit_p (osi.reexamine, i))
549 {
550 collect_object_sizes_for (&osi, ssa_name (i));
551 if (dump_file && (dump_flags & TDF_DETAILS))
552 {
553 fprintf (dump_file, "Reexamining ");
554 print_generic_expr (dump_file, ssa_name (i),
555 dump_flags);
556 fprintf (dump_file, "\n");
557 }
558 }
559 }
560 while (osi.changed);
561
562 BITMAP_FREE (reexamine);
563 }
564 EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
565 bitmap_set_bit (computed[object_size_type], i);
566
567 /* Debugging dumps. */
568 if (dump_file)
569 {
570 EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
571 if (object_sizes[object_size_type][i]
572 != unknown[object_size_type])
573 {
574 print_generic_expr (dump_file, ssa_name (i),
575 dump_flags);
576 fprintf (dump_file,
577 ": %s %sobject size "
578 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
579 (object_size_type & 2) ? "minimum" : "maximum",
580 (object_size_type & 1) ? "sub" : "",
581 object_sizes[object_size_type][i]);
582 }
583 }
584
585 BITMAP_FREE (osi.reexamine);
586 BITMAP_FREE (osi.visited);
587 }
588
589 return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
590 }
591
592 return unknown[object_size_type];
593 }
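/* Worked example of the two passes above (GIMPLE-like pseudocode, not
   taken from a real dump):
     p_1 = &buf[0];
     <loop>  p_2 = PHI <p_1, p_3>;  p_3 = p_2 + 4;
   Pass 0 reaches the PHI before p_3 has a size and marks p_2/p_3 for
   reexamination; pass 2 then iterates until nothing changes.  For the
   minimum object size, check_for_plus_in_loops first forces the whole
   p_2/p_3 cycle to size 0 so that pass 2 does not have to shrink the
   size a few bytes per iteration.  */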
594
595 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
596
597 static void
598 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
599 {
600 int object_size_type = osi->object_size_type;
601 unsigned int varno = SSA_NAME_VERSION (ptr);
602 unsigned HOST_WIDE_INT bytes;
603
604 gcc_assert (object_sizes[object_size_type][varno]
605 != unknown[object_size_type]);
606 gcc_assert (osi->pass == 0);
607
608 if (TREE_CODE (value) == WITH_SIZE_EXPR)
609 value = TREE_OPERAND (value, 0);
610
611 /* Pointer variables should have been handled by merge_object_sizes. */
612 gcc_assert (TREE_CODE (value) != SSA_NAME
613 || !POINTER_TYPE_P (TREE_TYPE (value)));
614
615 if (TREE_CODE (value) == ADDR_EXPR)
616 bytes = addr_object_size (osi, value, object_size_type);
617 else
618 bytes = unknown[object_size_type];
619
620 if ((object_size_type & 2) == 0)
621 {
622 if (object_sizes[object_size_type][varno] < bytes)
623 object_sizes[object_size_type][varno] = bytes;
624 }
625 else
626 {
627 if (object_sizes[object_size_type][varno] > bytes)
628 object_sizes[object_size_type][varno] = bytes;
629 }
630 }
631
632
633 /* Compute object_sizes for PTR, defined to the result of a call. */
634
635 static void
636 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
637 {
638 int object_size_type = osi->object_size_type;
639 unsigned int varno = SSA_NAME_VERSION (ptr);
640 unsigned HOST_WIDE_INT bytes;
641
642 gcc_assert (is_gimple_call (call));
643
644 gcc_assert (object_sizes[object_size_type][varno]
645 != unknown[object_size_type]);
646 gcc_assert (osi->pass == 0);
647
648 bytes = alloc_object_size (call, object_size_type);
649
650 if ((object_size_type & 2) == 0)
651 {
652 if (object_sizes[object_size_type][varno] < bytes)
653 object_sizes[object_size_type][varno] = bytes;
654 }
655 else
656 {
657 if (object_sizes[object_size_type][varno] > bytes)
658 object_sizes[object_size_type][varno] = bytes;
659 }
660 }
661
662
663 /* Compute object_sizes for PTR, defined to an unknown value. */
664
665 static void
666 unknown_object_size (struct object_size_info *osi, tree ptr)
667 {
668 int object_size_type = osi->object_size_type;
669 unsigned int varno = SSA_NAME_VERSION (ptr);
670 unsigned HOST_WIDE_INT bytes;
671
672 gcc_assert (object_sizes[object_size_type][varno]
673 != unknown[object_size_type]);
674 gcc_assert (osi->pass == 0);
675
676 bytes = unknown[object_size_type];
677
678 if ((object_size_type & 2) == 0)
679 {
680 if (object_sizes[object_size_type][varno] < bytes)
681 object_sizes[object_size_type][varno] = bytes;
682 }
683 else
684 {
685 if (object_sizes[object_size_type][varno] > bytes)
686 object_sizes[object_size_type][varno] = bytes;
687 }
688 }
689
690
691 /* Merge object sizes of ORIG + OFFSET into DEST. Return true if
692 the object size might need reexamination later. */
693
694 static bool
695 merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
696 unsigned HOST_WIDE_INT offset)
697 {
698 int object_size_type = osi->object_size_type;
699 unsigned int varno = SSA_NAME_VERSION (dest);
700 unsigned HOST_WIDE_INT orig_bytes;
701
702 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
703 return false;
704 if (offset >= offset_limit)
705 {
706 object_sizes[object_size_type][varno] = unknown[object_size_type];
707 return false;
708 }
709
710 if (osi->pass == 0)
711 collect_object_sizes_for (osi, orig);
712
713 orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
714 if (orig_bytes != unknown[object_size_type])
715 orig_bytes = (offset > orig_bytes)
716 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;
717
718 if ((object_size_type & 2) == 0)
719 {
720 if (object_sizes[object_size_type][varno] < orig_bytes)
721 {
722 object_sizes[object_size_type][varno] = orig_bytes;
723 osi->changed = true;
724 }
725 }
726 else
727 {
728 if (object_sizes[object_size_type][varno] > orig_bytes)
729 {
730 object_sizes[object_size_type][varno] = orig_bytes;
731 osi->changed = true;
732 }
733 }
734 return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
735 }
736
737
738 /* Compute object_sizes for VAR, defined to the result of an assignment
739 with operator POINTER_PLUS_EXPR. Return true if the object size might
740 need reexamination later. */
741
742 static bool
743 plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
744 {
745 int object_size_type = osi->object_size_type;
746 unsigned int varno = SSA_NAME_VERSION (var);
747 unsigned HOST_WIDE_INT bytes;
748 tree op0, op1;
749
750 gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);
751
752 op0 = gimple_assign_rhs1 (stmt);
753 op1 = gimple_assign_rhs2 (stmt);
754
755 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
756 return false;
757
758 /* Handle PTR + OFFSET here. */
759 if (TREE_CODE (op1) == INTEGER_CST
760 && (TREE_CODE (op0) == SSA_NAME
761 || TREE_CODE (op0) == ADDR_EXPR))
762 {
763 if (! host_integerp (op1, 1))
764 bytes = unknown[object_size_type];
765 else if (TREE_CODE (op0) == SSA_NAME)
766 return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
767 else
768 {
769 unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
770
771 /* op0 will be ADDR_EXPR here. */
772 bytes = addr_object_size (osi, op0, object_size_type);
773 if (bytes == unknown[object_size_type])
774 ;
775 else if (off > offset_limit)
776 bytes = unknown[object_size_type];
777 else if (off > bytes)
778 bytes = 0;
779 else
780 bytes -= off;
781 }
782 }
783 else
784 bytes = unknown[object_size_type];
785
786 if ((object_size_type & 2) == 0)
787 {
788 if (object_sizes[object_size_type][varno] < bytes)
789 object_sizes[object_size_type][varno] = bytes;
790 }
791 else
792 {
793 if (object_sizes[object_size_type][varno] > bytes)
794 object_sizes[object_size_type][varno] = bytes;
795 }
796 return false;
797 }
798
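/* Concrete illustration of the PTR + OFFSET handling above (sketch):
     char buf[10];
     char *p = buf + 7;      (a POINTER_PLUS_EXPR <&buf, 7> in GIMPLE)
   yields an object size of 10 - 7 = 3 for p.  An offset of 12 would be
   clamped to 0, and a non-constant or over-limit offset yields
   unknown[object_size_type].  */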
799
800 /* Compute object_sizes for VAR, defined to VALUE, which is
801 a COND_EXPR. Return true if the object size might need reexamination
802 later. */
803
804 static bool
805 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
806 {
807 tree then_, else_;
808 int object_size_type = osi->object_size_type;
809 unsigned int varno = SSA_NAME_VERSION (var);
810 bool reexamine = false;
811
812 gcc_assert (TREE_CODE (value) == COND_EXPR);
813
814 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
815 return false;
816
817 then_ = COND_EXPR_THEN (value);
818 else_ = COND_EXPR_ELSE (value);
819
820 if (TREE_CODE (then_) == SSA_NAME)
821 reexamine |= merge_object_sizes (osi, var, then_, 0);
822 else
823 expr_object_size (osi, var, then_);
824
825 if (TREE_CODE (else_) == SSA_NAME)
826 reexamine |= merge_object_sizes (osi, var, else_, 0);
827 else
828 expr_object_size (osi, var, else_);
829
830 return reexamine;
831 }
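/* Sketch of the COND_EXPR case just handled:
     p = cond ? &a[0] : &b[0];
   The maximum object size of p becomes MAX (sizeof a, sizeof b) and the
   minimum variant MIN of the two, each arm going through expr_object_size
   (ADDR_EXPR arms) or merge_object_sizes (SSA_NAME arms).  */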
832
833 /* Compute object sizes for VAR.
834 For an ADDR_EXPR, the object size is the number of remaining bytes
835 to the end of the object (where what is considered an object depends on
836 OSI->object_size_type).
837 For an allocation GIMPLE_CALL like malloc or calloc, the object size is
838 the size of the allocation.
839 For a POINTER_PLUS_EXPR whose second operand is a constant integer, the
840 object size is the object size of the first operand minus the constant.
841 If the constant is bigger than the number of remaining bytes until the
842 end of the object, the object size is 0, but if it is instead a pointer
843 subtraction, the object size is unknown[object_size_type].
844 To differentiate addition from subtraction, ADDR_EXPR returns
845 unknown[object_size_type] for all objects bigger than half of the address
846 space; constants less than half of the address space are considered
847 additions, while bigger constants are treated as subtractions.
848 For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
849 the object size is the object size of that argument.
850 Otherwise, the object size is the maximum of the object sizes of the
851 variables that it might be set to. */
852
853 static void
854 collect_object_sizes_for (struct object_size_info *osi, tree var)
855 {
856 int object_size_type = osi->object_size_type;
857 unsigned int varno = SSA_NAME_VERSION (var);
858 gimple stmt;
859 bool reexamine;
860
861 if (bitmap_bit_p (computed[object_size_type], varno))
862 return;
863
864 if (osi->pass == 0)
865 {
866 if (! bitmap_bit_p (osi->visited, varno))
867 {
868 bitmap_set_bit (osi->visited, varno);
869 object_sizes[object_size_type][varno]
870 = (object_size_type & 2) ? -1 : 0;
871 }
872 else
873 {
874 /* Found a dependency loop. Mark the variable for later
875 re-examination. */
876 bitmap_set_bit (osi->reexamine, varno);
877 if (dump_file && (dump_flags & TDF_DETAILS))
878 {
879 fprintf (dump_file, "Found a dependency loop at ");
880 print_generic_expr (dump_file, var, dump_flags);
881 fprintf (dump_file, "\n");
882 }
883 return;
884 }
885 }
886
887 if (dump_file && (dump_flags & TDF_DETAILS))
888 {
889 fprintf (dump_file, "Visiting use-def links for ");
890 print_generic_expr (dump_file, var, dump_flags);
891 fprintf (dump_file, "\n");
892 }
893
894 stmt = SSA_NAME_DEF_STMT (var);
895 reexamine = false;
896
897 switch (gimple_code (stmt))
898 {
899 case GIMPLE_ASSIGN:
900 {
901 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
902 reexamine = plus_stmt_object_size (osi, var, stmt);
903 else if (gimple_assign_single_p (stmt)
904 || gimple_assign_unary_nop_p (stmt))
905 {
906 tree rhs = gimple_assign_rhs1 (stmt);
907
908 if (TREE_CODE (rhs) == SSA_NAME
909 && POINTER_TYPE_P (TREE_TYPE (rhs)))
910 reexamine = merge_object_sizes (osi, var, rhs, 0);
911 else if (TREE_CODE (rhs) == COND_EXPR)
912 reexamine = cond_expr_object_size (osi, var, rhs);
913 else
914 expr_object_size (osi, var, rhs);
915 }
916 else
917 unknown_object_size (osi, var);
918 break;
919 }
920
921 case GIMPLE_CALL:
922 {
923 tree arg = pass_through_call (stmt);
924 if (arg)
925 {
926 if (TREE_CODE (arg) == SSA_NAME
927 && POINTER_TYPE_P (TREE_TYPE (arg)))
928 reexamine = merge_object_sizes (osi, var, arg, 0);
929 else if (TREE_CODE (arg) == COND_EXPR)
930 reexamine = cond_expr_object_size (osi, var, arg);
931 else
932 expr_object_size (osi, var, arg);
933 }
934 else
935 call_object_size (osi, var, stmt);
936 break;
937 }
938
939 case GIMPLE_ASM:
940 /* Pointers defined by __asm__ statements can point anywhere. */
941 object_sizes[object_size_type][varno] = unknown[object_size_type];
942 break;
943
944 case GIMPLE_NOP:
945 {
946 tree decl = SSA_NAME_VAR (var);
947
948 if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
949 expr_object_size (osi, var, DECL_INITIAL (decl));
950 else
951 expr_object_size (osi, var, decl);
952 }
953 break;
954
955 case GIMPLE_PHI:
956 {
957 unsigned i;
958
959 for (i = 0; i < gimple_phi_num_args (stmt); i++)
960 {
961 tree rhs = gimple_phi_arg (stmt, i)->def;
962
963 if (object_sizes[object_size_type][varno]
964 == unknown[object_size_type])
965 break;
966
967 if (TREE_CODE (rhs) == SSA_NAME)
968 reexamine |= merge_object_sizes (osi, var, rhs, 0);
969 else if (osi->pass == 0)
970 expr_object_size (osi, var, rhs);
971 }
972 break;
973 }
974
975 default:
976 gcc_unreachable ();
977 }
978
979 if (! reexamine
980 || object_sizes[object_size_type][varno] == unknown[object_size_type])
981 {
982 bitmap_set_bit (computed[object_size_type], varno);
983 bitmap_clear_bit (osi->reexamine, varno);
984 }
985 else
986 {
987 bitmap_set_bit (osi->reexamine, varno);
988 if (dump_file && (dump_flags & TDF_DETAILS))
989 {
990 fprintf (dump_file, "Need to reexamine ");
991 print_generic_expr (dump_file, var, dump_flags);
992 fprintf (dump_file, "\n");
993 }
994 }
995 }
996
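/* PHI example for the GIMPLE_PHI case above (sketch):
     p_3 = PHI <&a[0](bb1), q_2(bb2)>
   For the maximum size, p_3 receives MAX (sizeof a, object size of q_2);
   the SSA_NAME argument goes through merge_object_sizes and may flag p_3
   for reexamination if q_2 is part of a dependency cycle.  */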
997
998 /* Helper function for check_for_plus_in_loops. Called recursively
999 to detect loops. */
1000
1001 static void
1002 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
1003 unsigned int depth)
1004 {
1005 gimple stmt = SSA_NAME_DEF_STMT (var);
1006 unsigned int varno = SSA_NAME_VERSION (var);
1007
1008 if (osi->depths[varno])
1009 {
1010 if (osi->depths[varno] != depth)
1011 {
1012 unsigned int *sp;
1013
1014 /* Found a loop involving pointer addition. */
1015 for (sp = osi->tos; sp > osi->stack; )
1016 {
1017 --sp;
1018 bitmap_clear_bit (osi->reexamine, *sp);
1019 bitmap_set_bit (computed[osi->object_size_type], *sp);
1020 object_sizes[osi->object_size_type][*sp] = 0;
1021 if (*sp == varno)
1022 break;
1023 }
1024 }
1025 return;
1026 }
1027 else if (! bitmap_bit_p (osi->reexamine, varno))
1028 return;
1029
1030 osi->depths[varno] = depth;
1031 *osi->tos++ = varno;
1032
1033 switch (gimple_code (stmt))
1034 {
1035
1036 case GIMPLE_ASSIGN:
1037 {
1038 if ((gimple_assign_single_p (stmt)
1039 || gimple_assign_unary_nop_p (stmt))
1040 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
1041 {
1042 tree rhs = gimple_assign_rhs1 (stmt);
1043
1044 check_for_plus_in_loops_1 (osi, rhs, depth);
1045 }
1046 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1047 {
1048 tree basevar = gimple_assign_rhs1 (stmt);
1049 tree cst = gimple_assign_rhs2 (stmt);
1050
1051 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1052
1053 check_for_plus_in_loops_1 (osi, basevar,
1054 depth + !integer_zerop (cst));
1055 }
1056 else
1057 gcc_unreachable ();
1058 break;
1059 }
1060
1061 case GIMPLE_CALL:
1062 {
1063 tree arg = pass_through_call (stmt);
1064 if (arg)
1065 {
1066 if (TREE_CODE (arg) == SSA_NAME)
1067 check_for_plus_in_loops_1 (osi, arg, depth);
1068 else
1069 gcc_unreachable ();
1070 }
1071 break;
1072 }
1073
1074 case GIMPLE_PHI:
1075 {
1076 unsigned i;
1077
1078 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1079 {
1080 tree rhs = gimple_phi_arg (stmt, i)->def;
1081
1082 if (TREE_CODE (rhs) == SSA_NAME)
1083 check_for_plus_in_loops_1 (osi, rhs, depth);
1084 }
1085 break;
1086 }
1087
1088 default:
1089 gcc_unreachable ();
1090 }
1091
1092 osi->depths[varno] = 0;
1093 osi->tos--;
1094 }
1095
1096
1097 /* Check if some pointer we are computing object size of is being increased
1098 within a loop. If yes, assume all the SSA variables participating in
1099 that loop have minimum object sizes 0. */
1100
1101 static void
1102 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1103 {
1104 gimple stmt = SSA_NAME_DEF_STMT (var);
1105
1106 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1107 and looked for a POINTER_PLUS_EXPR in the pass-through
1108 argument, if any. In GIMPLE, however, such an expression
1109 is not a valid call operand. */
1110
1111 if (is_gimple_assign (stmt)
1112 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1113 {
1114 tree basevar = gimple_assign_rhs1 (stmt);
1115 tree cst = gimple_assign_rhs2 (stmt);
1116
1117 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1118
1119 if (integer_zerop (cst))
1120 return;
1121
1122 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1123 *osi->tos++ = SSA_NAME_VERSION (basevar);
1124 check_for_plus_in_loops_1 (osi, var, 2);
1125 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1126 osi->tos--;
1127 }
1128 }
1129
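/* The situation guarded against here, spelled out (sketch):
     p = &buf[0];
     while (cond)
       p = p + 4;
   When the minimum object size is requested, every SSA name in the
   p-increment cycle is simply given object size 0 at this point, instead
   of letting the reexamination pass shrink it four bytes at a time.  */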
1130
1131 /* Initialize data structures for the object size computation. */
1132
1133 void
1134 init_object_sizes (void)
1135 {
1136 int object_size_type;
1137
1138 if (object_sizes[0])
1139 return;
1140
1141 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1142 {
1143 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1144 computed[object_size_type] = BITMAP_ALLOC (NULL);
1145 }
1146
1147 init_offset_limit ();
1148 }
1149
1150
1151 /* Destroy data structures after the object size computation. */
1152
1153 void
1154 fini_object_sizes (void)
1155 {
1156 int object_size_type;
1157
1158 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1159 {
1160 free (object_sizes[object_size_type]);
1161 BITMAP_FREE (computed[object_size_type]);
1162 object_sizes[object_size_type] = NULL;
1163 }
1164 }
1165
1166
1167 /* Simple pass to optimize all __builtin_object_size () builtins. */
1168
1169 static unsigned int
1170 compute_object_sizes (void)
1171 {
1172 basic_block bb;
1173 FOR_EACH_BB (bb)
1174 {
1175 gimple_stmt_iterator i;
1176 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1177 {
1178 tree callee, result;
1179 gimple call = gsi_stmt (i);
1180
1181 if (gimple_code (call) != GIMPLE_CALL)
1182 continue;
1183
1184 callee = gimple_call_fndecl (call);
1185 if (!callee
1186 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1187 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1188 continue;
1189
1190 init_object_sizes ();
1191 result = fold_call_stmt (call, false);
1192 if (!result)
1193 {
1194 if (gimple_call_num_args (call) == 2
1195 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1196 {
1197 tree ost = gimple_call_arg (call, 1);
1198
1199 if (host_integerp (ost, 1))
1200 {
1201 unsigned HOST_WIDE_INT object_size_type
1202 = tree_low_cst (ost, 1);
1203
1204 if (object_size_type < 2)
1205 result = fold_convert (size_type_node,
1206 integer_minus_one_node);
1207 else if (object_size_type < 4)
1208 result = fold_convert (size_type_node,
1209 integer_zero_node);
1210 }
1211 }
1212
1213 if (!result)
1214 continue;
1215 }
1216
1217 if (dump_file && (dump_flags & TDF_DETAILS))
1218 {
1219 fprintf (dump_file, "Simplified\n ");
1220 print_gimple_stmt (dump_file, call, 0, dump_flags);
1221 }
1222
1223 if (!update_call_from_tree (&i, result))
1224 gcc_unreachable ();
1225
1226 /* NOTE: In the pre-tuples code, we called update_stmt here. This is
1227 now handled by gsi_replace, called from update_call_from_tree. */
1228
1229 if (dump_file && (dump_flags & TDF_DETAILS))
1230 {
1231 fprintf (dump_file, "to\n ");
1232 print_gimple_stmt (dump_file, call, 0, dump_flags);
1233 fprintf (dump_file, "\n");
1234 }
1235 }
1236 }
1237
1238 fini_object_sizes ();
1239 return 0;
1240 }
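/* Net effect of the pass, illustrated on hypothetical user code:
     char buf[16];
     size_t n = __builtin_object_size (buf + 4, 0);
   is folded to n = 12.  When nothing can be determined, the call is
   replaced by the documented fallback values computed just above:
   (size_t) -1 for object_size_type 0 and 1, and 0 for types 2 and 3.  */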
1241
1242 struct gimple_opt_pass pass_object_sizes =
1243 {
1244 {
1245 GIMPLE_PASS,
1246 "objsz", /* name */
1247 NULL, /* gate */
1248 compute_object_sizes, /* execute */
1249 NULL, /* sub */
1250 NULL, /* next */
1251 0, /* static_pass_number */
1252 TV_NONE, /* tv_id */
1253 PROP_cfg | PROP_ssa, /* properties_required */
1254 0, /* properties_provided */
1255 0, /* properties_destroyed */
1256 0, /* todo_flags_start */
1257 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
1258 }
1259 };