]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-object-size.c
[multiple changes]
[thirdparty/gcc.git] / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "toplev.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32
/* State shared by one object-size computation (see
   compute_builtin_object_size).  */
struct object_size_info
{
  int object_size_type;		/* 0-3; second __builtin_object_size arg.  */
  bitmap visited, reexamine;	/* SSA versions seen / needing reexamination.  */
  int pass;			/* 0: first UD walk, 1: loop detection,
				   2: fixpoint iteration.  */
  bool changed;			/* Set when a recorded size changed; drives
				   the pass-2 fixpoint loop.  */
  unsigned int *depths;		/* Per-SSA-version depth, used by
				   check_for_plus_in_loops_1.  */
  unsigned int *stack, *tos;	/* Stack (and top) of SSA versions currently
				   being visited during loop detection.  */
};
42
/* The "size unknown" value for each object_size_type: all-ones
   (i.e. (size_t) -1) for the maximum modes 0 and 1, zero for the
   minimum modes 2 and 3.  */
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
44
/* Forward declarations for the file-local helpers below.  */
static tree compute_object_offset (tree, tree);
static unsigned HOST_WIDE_INT addr_object_size (tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (tree, int);
static tree pass_through_call (tree);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_expr_object_size (struct object_size_info *, tree, tree);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);
60
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA name version.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps, one per object_size_type, of SSA name versions whose object
   sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition; larger constants
   added to a pointer are treated as pointer subtraction instead.  */
static unsigned HOST_WIDE_INT offset_limit;
74
75
76 /* Initialize OFFSET_LIMIT variable. */
77 static void
78 init_offset_limit (void)
79 {
80 if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
81 offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
82 else
83 offset_limit = -1;
84 offset_limit /= 2;
85 }
86
87
/* Compute the byte offset of EXPR within VAR, where EXPR is a chain of
   component references (COMPONENT_REF, ARRAY_REF, ...) whose base is VAR.
   Return error_mark_node if unknown.  */

static tree
compute_object_offset (tree expr, tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the field's byte offset:
	 DECL_FIELD_OFFSET plus the whole-byte part of
	 DECL_FIELD_BIT_OFFSET.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the byte offset.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      /* The imaginary part lives one component size past the start of
	 the complex value.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      /* Index times element size.  A negative constant index becomes a
	 subtraction of its absolute value, so the arithmetic stays in
	 the unsigned sizetype domain.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
149
150
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size;
   bit 0 requests the subobject (innermost array/field) size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Find the outermost object the address points into.  */
  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  /* Only variables and string literals with a known constant size are
     handled; sizes of offset_limit or more are rejected (they would be
     treated as pointer subtraction, see the block comment before
     collect_object_sizes_for).  */
  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1) < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
	{
	  tree var;

	  if (object_size_type & 1)
	    {
	      /* Subobject size requested: walk inwards to the innermost
		 accessed array or field whose address is taken.  */
	      var = TREE_OPERAND (ptr, 0);

	      while (var != pt_var
		     && TREE_CODE (var) != BIT_FIELD_REF
		     && TREE_CODE (var) != COMPONENT_REF
		     && TREE_CODE (var) != ARRAY_REF
		     && TREE_CODE (var) != ARRAY_RANGE_REF
		     && TREE_CODE (var) != REALPART_EXPR
		     && TREE_CODE (var) != IMAGPART_EXPR)
		var = TREE_OPERAND (var, 0);
	      /* For &array[index] treat the whole array as the
		 subobject.  */
	      if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
		var = TREE_OPERAND (var, 0);
	      /* Fall back to the whole object if the subobject type has
		 no usable constant size or would be larger than the
		 containing object.  */
	      if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
		  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
		  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
				      TYPE_SIZE_UNIT (TREE_TYPE (var))))
		var = pt_var;
	    }
	  else
	    var = pt_var;

	  /* Remaining bytes are VAR's size minus the offset of the
	     addressed location within VAR; clamp to zero if the offset
	     is already past the end.  */
	  bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
	  if (bytes != error_mark_node)
	    {
	      if (TREE_CODE (bytes) == INTEGER_CST
		  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes))
		bytes = size_zero_node;
	      else
		bytes = size_binop (MINUS_EXPR,
				    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
	    }
	}
      else
	bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
	return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
222
223
/* Compute __builtin_object_size for CALL, which is a CALL_EXPR.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (tree call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* 0-based positions of the size argument(s); -1 means not present.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  callee = get_callee_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* The alloc_size attribute names which argument(s) carry the
     allocated size; the positions stored there are 1-based, hence the
     -1 adjustments.  (The attribute handler is assumed to have
     validated the values as INTEGER_CSTs — done elsewhere.)  */
  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Hard-wired positions for the known allocation builtins:
     calloc (nmemb, size), malloc (size), alloca (size).  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
	arg1 = 0;
      default:
	break;
      }

  /* Only calls whose size argument(s) are integer constants can yield
     a known size.  */
  if (arg1 < 0 || arg1 >= call_expr_nargs (call)
      || TREE_CODE (CALL_EXPR_ARG (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= call_expr_nargs (call)
	      || TREE_CODE (CALL_EXPR_ARG (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  /* Two size arguments multiply (calloc-style); one is used directly.  */
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
			fold_convert (sizetype, CALL_EXPR_ARG (call, arg1)),
			fold_convert (sizetype, CALL_EXPR_ARG (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, CALL_EXPR_ARG (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
284
285
286 /* If object size is propagated from one of function's arguments directly
287 to its return value, return that argument for CALL_EXPR CALL.
288 Otherwise return NULL. */
289
290 static tree
291 pass_through_call (tree call)
292 {
293 tree callee = get_callee_fndecl (call);
294
295 if (callee
296 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
297 switch (DECL_FUNCTION_CODE (callee))
298 {
299 case BUILT_IN_MEMCPY:
300 case BUILT_IN_MEMMOVE:
301 case BUILT_IN_MEMSET:
302 case BUILT_IN_STRCPY:
303 case BUILT_IN_STRNCPY:
304 case BUILT_IN_STRCAT:
305 case BUILT_IN_STRNCAT:
306 case BUILT_IN_MEMCPY_CHK:
307 case BUILT_IN_MEMMOVE_CHK:
308 case BUILT_IN_MEMSET_CHK:
309 case BUILT_IN_STRCPY_CHK:
310 case BUILT_IN_STRNCPY_CHK:
311 case BUILT_IN_STRCAT_CHK:
312 case BUILT_IN_STRNCAT_CHK:
313 if (call_expr_nargs (call) >= 1)
314 return CALL_EXPR_ARG (call, 0);
315 break;
316 default:
317 break;
318 }
319
320 return NULL_TREE;
321 }
322
323
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size: bit 0 selects subobject
   rather than whole-object size, bit 1 selects minimum rather than
   maximum size.  Returns unknown[object_size_type] when the size cannot
   be determined.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (ptr, object_size_type);
  else if (TREE_CODE (ptr) == CALL_EXPR)
    {
      /* Calls returning one of their arguments (memcpy etc.) have the
	 object size of that argument; otherwise try allocation calls.  */
      tree arg = pass_through_call (ptr);

      if (arg)
	return compute_builtin_object_size (arg, object_size_type);
      else
	return alloc_object_size (ptr, object_size_type);
    }
  else if (TREE_CODE (ptr) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (ptr))
	   && object_sizes[object_size_type] != NULL)
    {
      /* Results are cached per SSA name version; compute on demand.  */
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is still in the reexamine bitmap has reached a
	     fixpoint; mark it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
467
468
469 /* Compute object_sizes for PTR, defined to VALUE, which is not
470 a SSA_NAME. */
471
472 static void
473 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
474 {
475 int object_size_type = osi->object_size_type;
476 unsigned int varno = SSA_NAME_VERSION (ptr);
477 unsigned HOST_WIDE_INT bytes;
478
479 gcc_assert (object_sizes[object_size_type][varno]
480 != unknown[object_size_type]);
481 gcc_assert (osi->pass == 0);
482
483 if (TREE_CODE (value) == WITH_SIZE_EXPR)
484 value = TREE_OPERAND (value, 0);
485
486 /* Pointer variables should have been handled by merge_object_sizes. */
487 gcc_assert (TREE_CODE (value) != SSA_NAME
488 || !POINTER_TYPE_P (TREE_TYPE (value)));
489
490 if (TREE_CODE (value) == ADDR_EXPR)
491 bytes = addr_object_size (value, object_size_type);
492 else if (TREE_CODE (value) == CALL_EXPR)
493 bytes = alloc_object_size (value, object_size_type);
494 else
495 bytes = unknown[object_size_type];
496
497 if ((object_size_type & 2) == 0)
498 {
499 if (object_sizes[object_size_type][varno] < bytes)
500 object_sizes[object_size_type][varno] = bytes;
501 }
502 else
503 {
504 if (object_sizes[object_size_type][varno] > bytes)
505 object_sizes[object_size_type][varno] = bytes;
506 }
507 }
508
509
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later, i.e. ORIG itself is
   still on the reexamine worklist.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Once DEST is unknown it can never improve; stop early.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past offset_limit are considered pointer subtraction,
     which makes the result unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  /* During the first UD-chain walk, compute ORIG's size on demand.  */
  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Maximum modes only grow the recorded size, minimum modes only
     shrink it; flag any change so the pass-2 fixpoint loop iterates.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
555
556
/* Compute object_sizes for PTR, defined to VALUE, which is
   a POINTER_PLUS_EXPR.  Return true if the object size might need
   reexamination later.  */

static bool
plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree op0 = TREE_OPERAND (value, 0);
  tree op1 = TREE_OPERAND (value, 1);
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (TREE_CODE (value) == POINTER_PLUS_EXPR);

  /* Once VAR is unknown there is nothing left to improve.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
	/* Offset does not fit unsigned HOST_WIDE_INT.  */
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Delegate to the generic merge, which handles the offset
	   subtraction and reexamination bookkeeping.  */
	return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

	  /* op0 is an ADDR_EXPR: compute its size directly and subtract
	     the constant offset, clamping at zero.  Offsets above
	     offset_limit are treated as pointer subtraction.  */
	  bytes = compute_builtin_object_size (op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Merge per min/max mode, as in expr_object_size.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
614
615
616 /* Compute object_sizes for PTR, defined to VALUE, which is
617 a COND_EXPR. Return true if the object size might need reexamination
618 later. */
619
620 static bool
621 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
622 {
623 tree then_, else_;
624 int object_size_type = osi->object_size_type;
625 unsigned int varno = SSA_NAME_VERSION (var);
626 bool reexamine = false;
627
628 gcc_assert (TREE_CODE (value) == COND_EXPR);
629
630 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
631 return false;
632
633 then_ = COND_EXPR_THEN (value);
634 else_ = COND_EXPR_ELSE (value);
635
636 if (TREE_CODE (then_) == SSA_NAME)
637 reexamine |= merge_object_sizes (osi, var, then_, 0);
638 else
639 expr_object_size (osi, var, then_);
640
641 if (TREE_CODE (else_) == SSA_NAME)
642 reexamine |= merge_object_sizes (osi, var, else_, 0);
643 else
644 expr_object_size (osi, var, else_);
645
646 return reexamine;
647 }
648
649
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation CALL_EXPR like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like CALL_EXPR that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  tree stmt;
  bool reexamine;

  /* Already finalized in an earlier call.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (! bitmap_bit_p (osi->visited, varno))
	{
	  bitmap_set_bit (osi->visited, varno);
	  /* Start from the identity of the min/max merge: all-ones for
	     minimum modes, 0 for maximum modes.  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of statement defining VAR.  */
  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
      stmt = TREE_OPERAND (stmt, 0);
      /* FALLTHRU */

    case GIMPLE_MODIFY_STMT:
      {
	tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
	STRIP_NOPS (rhs);

	/* Calls that pass through an argument (memcpy etc.) are treated
	   as if VAR were set from that argument directly.  */
	if (TREE_CODE (rhs) == CALL_EXPR)
	  {
	    arg = pass_through_call (rhs);
	    if (arg)
	      rhs = arg;
	  }

	if (TREE_CODE (rhs) == SSA_NAME
	    && POINTER_TYPE_P (TREE_TYPE (rhs)))
	  reexamine = merge_object_sizes (osi, var, rhs, 0);

	else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
	  reexamine = plus_expr_object_size (osi, var, rhs);

	else if (TREE_CODE (rhs) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, rhs);

	else
	  expr_object_size (osi, var, rhs);
	break;
      }

    case ASM_EXPR:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case NOP_EXPR:
      {
	/* Default definition (empty statement): VAR is either a
	   parameter or takes its value from the decl's initializer.  */
	tree decl = SSA_NAME_VAR (var);

	gcc_assert (IS_EMPTY_STMT (stmt));

	if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
	  expr_object_size (osi, var, DECL_INITIAL (decl));
	else
	  expr_object_size (osi, var, decl);
      }
      break;

    case PHI_NODE:
      {
	int i;

	/* Merge every incoming value; stop once the size has already
	   degraded to unknown.  */
	for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
	  {
	    tree rhs = PHI_ARG_DEF (stmt, i);

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }
    default:
      gcc_unreachable ();
    }

  /* A size that is final (no dependency still pending, or already
     unknown) is recorded as computed; otherwise keep it on the
     reexamine worklist for pass 2.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
806
807
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH is incremented each time a nonzero constant
   is added to the pointer along the walk; revisiting a variable at a
   different depth therefore means the pointer grows around the cycle.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Everything on the
	     stack down to VAR participates in it: force those sizes to 0
	     and mark them computed so pass 2 does not revisit them.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only variables still awaiting reexamination can be in a cycle
       of interest.  */
    return;

  /* Push VAR with the current depth and walk its definition.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
      stmt = TREE_OPERAND (stmt, 0);
      /* FALLTHRU */

    case GIMPLE_MODIFY_STMT:
      {
	tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
	STRIP_NOPS (rhs);

	if (TREE_CODE (rhs) == CALL_EXPR)
	  {
	    arg = pass_through_call (rhs);
	    if (arg)
	      rhs = arg;
	  }

	if (TREE_CODE (rhs) == SSA_NAME)
	  /* Plain copy: same depth.  */
	  check_for_plus_in_loops_1 (osi, rhs, depth);
	else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
	  {
	    tree op0 = TREE_OPERAND (rhs, 0);
	    tree op1 = TREE_OPERAND (rhs, 1);
	    tree cst, basevar;

	    basevar = op0;
	    cst = op1;
	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Adding a nonzero constant bumps the depth.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }
    case PHI_NODE:
      {
	int i;

	/* Follow every SSA-name argument of the PHI.  */
	for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
	  {
	    tree rhs = PHI_ARG_DEF (stmt, i);

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }
    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
901
902
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  Only starts a walk when VAR is
   itself defined by pointer + nonzero-constant.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  tree stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (stmt))
    {
    case RETURN_EXPR:
      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
      stmt = TREE_OPERAND (stmt, 0);
      /* FALLTHRU */

    case GIMPLE_MODIFY_STMT:
      {
	tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
	STRIP_NOPS (rhs);

	if (TREE_CODE (rhs) == CALL_EXPR)
	  {
	    arg = pass_through_call (rhs);
	    if (arg)
	      rhs = arg;
	  }

	if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
	  {
	    tree op0 = TREE_OPERAND (rhs, 0);
	    tree op1 = TREE_OPERAND (rhs, 1);
	    tree cst, basevar;

	    basevar = op0;
	    cst = op1;
	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Adding zero cannot make the pointer grow in a loop.  */
	    if (integer_zerop (cst))
	      break;

	    /* Seed the depth stack with the base pointer at depth 1 and
	       walk from VAR at depth 2; any revisit at a differing depth
	       inside check_for_plus_in_loops_1 flags a growing loop.  */
	    osi->depths[SSA_NAME_VERSION (basevar)] = 1;
	    *osi->tos++ = SSA_NAME_VERSION (basevar);
	    check_for_plus_in_loops_1 (osi, var, 2);
	    osi->depths[SSA_NAME_VERSION (basevar)] = 0;
	    osi->tos--;
	  }
	break;
      }
    default:
      break;
    }
}
956
957
958 /* Initialize data structures for the object size computation. */
959
960 void
961 init_object_sizes (void)
962 {
963 int object_size_type;
964
965 if (object_sizes[0])
966 return;
967
968 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
969 {
970 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
971 computed[object_size_type] = BITMAP_ALLOC (NULL);
972 }
973
974 init_offset_limit ();
975 }
976
977
978 /* Destroy data structures after the object size computation. */
979
980 void
981 fini_object_sizes (void)
982 {
983 int object_size_type;
984
985 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
986 {
987 free (object_sizes[object_size_type]);
988 BITMAP_FREE (computed[object_size_type]);
989 object_sizes[object_size_type] = NULL;
990 }
991 }
992
993
/* Simple pass to optimize all __builtin_object_size () builtins:
   each call is folded to a constant where possible, otherwise replaced
   by the documented fallback constant ((size_t) -1 for types 0/1,
   0 for types 2/3).  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree *stmtp = bsi_stmt_ptr (i);
	  tree call = get_rhs (*stmtp);
	  tree callee, result;

	  if (!call || TREE_CODE (call) != CALL_EXPR)
	    continue;

	  /* Only __builtin_object_size calls are of interest.  */
	  callee = get_callee_fndecl (call);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
	    continue;

	  init_object_sizes ();
	  /* First try regular folding of the call.  */
	  result = fold_call_expr (call, false);
	  if (!result)
	    {
	      /* Folding failed: substitute the fallback constant for a
		 well-formed call ((size_t) -1 for maximum modes 0 and 1,
		 0 for minimum modes 2 and 3).  */
	      if (call_expr_nargs (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
		{
		  tree ost = CALL_EXPR_ARG (call, 1);

		  if (host_integerp (ost, 1))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = size_zero_node;
		    }
		}

	      if (!result)
		continue;
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	    }

	  /* Replace the call with the computed constant.  */
	  if (!set_rhs (stmtp, result))
	    gcc_unreachable ();
	  update_stmt (*stmtp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_generic_stmt (dump_file, *stmtp, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  fini_object_sizes ();
  return 0;
}
1066
/* Pass descriptor for the objsz pass; runs compute_object_sizes over
   functions in SSA form.  */
struct tree_opt_pass pass_object_sizes =
{
  "objsz",				/* name */
  NULL,					/* gate */
  compute_object_sizes,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};