/* Source: git.ipfire.org mirror of gcc.git, blob gcc/tree-object-size.c
   (web view captured 2013-11-18; last listed committer Richard Biener
   <rguenther@suse.de>).  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "bitmap.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "gimple-ssa.h"
32 #include "tree-ssanames.h"
33 #include "tree-pass.h"
34 #include "tree-ssa-propagate.h"
35 #include "tree-phinodes.h"
36 #include "ssa-iterators.h"
37
/* Per-invocation state of the object size propagation.  */
struct object_size_info
{
  /* Which of the four size variants is being computed:
     bit 0 = subobject size, bit 1 = minimum (rather than maximum).  */
  int object_size_type;
  /* SSA name versions already visited / needing re-examination.  */
  bitmap visited, reexamine;
  /* 0 = initial UD-chain walk, 1 = loop detection (minimum sizes only),
     2 = fixpoint re-examination.  */
  int pass;
  /* Set when a pass-2 re-examination changed some recorded size.  */
  bool changed;
  /* DFS depth per SSA name version; used only during pass 1.  */
  unsigned int *depths;
  /* Explicit DFS stack and its top-of-stack pointer; pass 1 only.  */
  unsigned int *stack, *tos;
};
47
/* "Unknown size" sentinels, indexed by object_size_type: all-ones for
   the maximum variants (0 and 1), zero for the minimum variants (2, 3).  */
static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA name version.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps of SSA name versions whose object sizes have been computed
   already, indexed by object_size_type.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
80
81
82 /* Initialize OFFSET_LIMIT variable. */
83 static void
84 init_offset_limit (void)
85 {
86 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
87 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
88 else
89 offset_limit = -1;
90 offset_limit /= 2;
91 }
92
93
/* Compute the byte offset of the access EXPR relative to its base
   object VAR, as a sizetype tree.  Return error_mark_node if the
   offset is not known at compile time.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset = DECL_FIELD_OFFSET plus the whole-byte part of
	 DECL_FIELD_BIT_OFFSET.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These wrappers do not change the offset of the underlying
	 object.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part is one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Negative constant indexes are turned into subtraction so the
	 unsigned sizetype arithmetic below stays well-defined.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
158
159
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip handled component references to reach the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* Whole-object size of the dereferenced pointer; clear the
	     subobject bit for the recursive query.  */
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  /* Inside the propagation pass, reuse (or first compute) the
	     size already tracked for the SSA name.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset; clamp negative results to 0.  */
	  double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
	  if (dsz.is_negative ())
	    sz = 0;
	  else if (dsz.fits_uhwi ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject size: locate the innermost accessed array or
	     field whose size we should report.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index is strictly below the array's upper
			       bound, so this cannot be a trailing
			       pseudo-flexible array; stop looking.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip over COMPONENT_REFs within unions.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Check whether the accessed field is followed by
			   another FIELD_DECL, i.e. is not the last one.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* v == pt_var means the access may reach a trailing array
		 member; fall back to the whole object's size.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus PTR's offset within it,
	 clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  /* Additionally bound the result by the remaining size of the
	     containing MEM_REF object.  */
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
379
380
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indexes of the size argument(s); -1 when absent.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* Honor the alloc_size attribute; it stores 1-based argument
     positions, hence the -1 adjustments below.  */
  alloc_size = lookup_attribute ("alloc_size",
				 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Built-in allocators have known size argument positions.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
	/* fall through */
      default:
	break;
      }

  /* Only constant size arguments are useful here.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    /* Two size arguments: allocation is their product (calloc style).  */
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
443
444
445 /* If object size is propagated from one of function's arguments directly
446 to its return value, return that argument for GIMPLE_CALL statement CALL.
447 Otherwise return NULL. */
448
449 static tree
450 pass_through_call (const_gimple call)
451 {
452 tree callee = gimple_call_fndecl (call);
453
454 if (callee
455 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
456 switch (DECL_FUNCTION_CODE (callee))
457 {
458 case BUILT_IN_MEMCPY:
459 case BUILT_IN_MEMMOVE:
460 case BUILT_IN_MEMSET:
461 case BUILT_IN_STRCPY:
462 case BUILT_IN_STRNCPY:
463 case BUILT_IN_STRCAT:
464 case BUILT_IN_STRNCAT:
465 case BUILT_IN_MEMCPY_CHK:
466 case BUILT_IN_MEMMOVE_CHK:
467 case BUILT_IN_MEMSET_CHK:
468 case BUILT_IN_STRCPY_CHK:
469 case BUILT_IN_STRNCPY_CHK:
470 case BUILT_IN_STPNCPY_CHK:
471 case BUILT_IN_STRCAT_CHK:
472 case BUILT_IN_STRNCAT_CHK:
473 case BUILT_IN_ASSUME_ALIGNED:
474 if (gimple_call_num_args (call) >= 1)
475 return gimple_call_arg (call, 0);
476 break;
477 default:
478 break;
479 }
480
481 return NULL_TREE;
482 }
483
484
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  /* &object needs no SSA propagation; compute it directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is still marked for reexamination has reached its
	     fixpoint; record it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
620
621 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
622
623 static void
624 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
625 {
626 int object_size_type = osi->object_size_type;
627 unsigned int varno = SSA_NAME_VERSION (ptr);
628 unsigned HOST_WIDE_INT bytes;
629
630 gcc_assert (object_sizes[object_size_type][varno]
631 != unknown[object_size_type]);
632 gcc_assert (osi->pass == 0);
633
634 if (TREE_CODE (value) == WITH_SIZE_EXPR)
635 value = TREE_OPERAND (value, 0);
636
637 /* Pointer variables should have been handled by merge_object_sizes. */
638 gcc_assert (TREE_CODE (value) != SSA_NAME
639 || !POINTER_TYPE_P (TREE_TYPE (value)));
640
641 if (TREE_CODE (value) == ADDR_EXPR)
642 bytes = addr_object_size (osi, value, object_size_type);
643 else
644 bytes = unknown[object_size_type];
645
646 if ((object_size_type & 2) == 0)
647 {
648 if (object_sizes[object_size_type][varno] < bytes)
649 object_sizes[object_size_type][varno] = bytes;
650 }
651 else
652 {
653 if (object_sizes[object_size_type][varno] > bytes)
654 object_sizes[object_size_type][varno] = bytes;
655 }
656 }
657
658
659 /* Compute object_sizes for PTR, defined to the result of a call. */
660
661 static void
662 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
663 {
664 int object_size_type = osi->object_size_type;
665 unsigned int varno = SSA_NAME_VERSION (ptr);
666 unsigned HOST_WIDE_INT bytes;
667
668 gcc_assert (is_gimple_call (call));
669
670 gcc_assert (object_sizes[object_size_type][varno]
671 != unknown[object_size_type]);
672 gcc_assert (osi->pass == 0);
673
674 bytes = alloc_object_size (call, object_size_type);
675
676 if ((object_size_type & 2) == 0)
677 {
678 if (object_sizes[object_size_type][varno] < bytes)
679 object_sizes[object_size_type][varno] = bytes;
680 }
681 else
682 {
683 if (object_sizes[object_size_type][varno] > bytes)
684 object_sizes[object_size_type][varno] = bytes;
685 }
686 }
687
688
689 /* Compute object_sizes for PTR, defined to an unknown value. */
690
691 static void
692 unknown_object_size (struct object_size_info *osi, tree ptr)
693 {
694 int object_size_type = osi->object_size_type;
695 unsigned int varno = SSA_NAME_VERSION (ptr);
696 unsigned HOST_WIDE_INT bytes;
697
698 gcc_assert (object_sizes[object_size_type][varno]
699 != unknown[object_size_type]);
700 gcc_assert (osi->pass == 0);
701
702 bytes = unknown[object_size_type];
703
704 if ((object_size_type & 2) == 0)
705 {
706 if (object_sizes[object_size_type][varno] < bytes)
707 object_sizes[object_size_type][varno] = bytes;
708 }
709 else
710 {
711 if (object_sizes[object_size_type][varno] > bytes)
712 object_sizes[object_size_type][varno] = bytes;
713 }
714 }
715
716
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Once DEST is unknown nothing can refine it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or beyond offset_limit are treated as pointer
     subtraction, i.e. the size becomes unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    /* Clamp at zero rather than letting the subtraction wrap.  */
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      /* Maximum variant: grow DEST's size if ORIG's is larger.  */
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      /* Minimum variant: shrink DEST's size if ORIG's is smaller.  */
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  /* If ORIG is still on a dependency cycle, DEST may change later.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
762
763
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr + off] is equivalent to ptr + off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    /* Treated as pointer subtraction; note this uses > while
	       merge_object_sizes uses >= — presumably intentional for
	       the ADDR_EXPR case, but worth confirming upstream.  */
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      /* Maximum variant: keep the larger size.  */
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      /* Minimum variant: keep the smaller size.  */
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
834
835
836 /* Compute object_sizes for VAR, defined at STMT, which is
837 a COND_EXPR. Return true if the object size might need reexamination
838 later. */
839
840 static bool
841 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
842 {
843 tree then_, else_;
844 int object_size_type = osi->object_size_type;
845 unsigned int varno = SSA_NAME_VERSION (var);
846 bool reexamine = false;
847
848 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
849
850 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
851 return false;
852
853 then_ = gimple_assign_rhs2 (stmt);
854 else_ = gimple_assign_rhs3 (stmt);
855
856 if (TREE_CODE (then_) == SSA_NAME)
857 reexamine |= merge_object_sizes (osi, var, then_, 0);
858 else
859 expr_object_size (osi, var, then_);
860
861 if (TREE_CODE (else_) == SSA_NAME)
862 reexamine |= merge_object_sizes (osi, var, else_, 0);
863 else
864 expr_object_size (osi, var, else_);
865
866 return reexamine;
867 }
868
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the identity element (0 for the
	     maximum variants, all-ones for the minimum variants).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of statement defining VAR.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	/* Calls like memcpy return one of their arguments; propagate
	   that argument's size.  Otherwise try the allocation logic.  */
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge all PHI argument sizes; stop early once the size is
	   already unknown.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      /* The size is final (or hopeless); no revisit needed.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1028
1029
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition: VAR was revisited
	     at a different depth, so the pointer grows each iteration.
	     Force the minimum object size of everything on the cycle
	     to 0 and mark it computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR onto the DFS stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    /* Plain copies do not change the depth.  */
	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero addition increases the depth; a revisit at a
	       different depth then signals a growing pointer.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Follow every PHI argument at the current depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR from the DFS stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1127
1128
1129 /* Check if some pointer we are computing object size of is being increased
1130 within a loop. If yes, assume all the SSA variables participating in
1131 that loop have minimum object sizes 0. */
1132
1133 static void
1134 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1135 {
1136 gimple stmt = SSA_NAME_DEF_STMT (var);
1137
1138 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1139 and looked for a POINTER_PLUS_EXPR in the pass-through
1140 argument, if any. In GIMPLE, however, such an expression
1141 is not a valid call operand. */
1142
1143 if (is_gimple_assign (stmt)
1144 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1145 {
1146 tree basevar = gimple_assign_rhs1 (stmt);
1147 tree cst = gimple_assign_rhs2 (stmt);
1148
1149 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1150
1151 if (integer_zerop (cst))
1152 return;
1153
1154 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1155 *osi->tos++ = SSA_NAME_VERSION (basevar);
1156 check_for_plus_in_loops_1 (osi, var, 2);
1157 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1158 osi->tos--;
1159 }
1160 }
1161
1162
1163 /* Initialize data structures for the object size computation. */
1164
1165 void
1166 init_object_sizes (void)
1167 {
1168 int object_size_type;
1169
1170 if (object_sizes[0])
1171 return;
1172
1173 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1174 {
1175 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1176 computed[object_size_type] = BITMAP_ALLOC (NULL);
1177 }
1178
1179 init_offset_limit ();
1180 }
1181
1182
1183 /* Destroy data structures after the object size computation. */
1184
1185 static void
1186 fini_object_sizes (void)
1187 {
1188 int object_size_type;
1189
1190 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1191 {
1192 free (object_sizes[object_size_type]);
1193 BITMAP_FREE (computed[object_size_type]);
1194 object_sizes[object_size_type] = NULL;
1195 }
1196 }
1197
1198
/* Simple pass to optimize all __builtin_object_size () builtins.

   Scans every statement for __builtin_object_size calls, folds each
   to an integer constant, propagates that constant into all uses of
   the call's result and re-folds those statements.  Returns 0 (no
   extra TODO flags).  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Allocate the per-SSA-name tables lazily, only once the
	     first __builtin_object_size call is actually found.  */
	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	  if (!result)
	    {
	      /* Folding gave nothing; fall back to the mode's unknown
		 value: (size_t)-1 for modes 0 and 1, 0 for modes 2
		 and 3 (matching the `unknown' table above).  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		{
		  tree ost = gimple_call_arg (call, 1);

		  if (tree_fits_uhwi_p (ost))
		    {
		      unsigned HOST_WIDE_INT object_size_type
			= tree_to_uhwi (ost);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);
		    }
		}

	      /* Still nothing (malformed call or out-of-range mode):
		 leave the statement untouched.  */
	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* A call whose result is unused needs no propagation.  */
	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  /* Propagate into all uses and fold those stmts.  */
	  gimple use_stmt;
	  imm_use_iterator iter;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      use_operand_p use_p;
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, result);
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      fold_stmt (&gsi);
	      update_stmt (gsi_stmt (gsi));
	    }
	}
    }

  fini_object_sizes ();
  return 0;
}
1274
namespace {

/* Pass metadata: a GIMPLE pass named "objsz" with no gate (runs
   unconditionally), requiring CFG and SSA form, and verifying SSA
   when finished.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

/* Thin opt_pass wrapper; all the work is done by
   compute_object_sizes.  */
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  /* clone lets the pass manager create additional instances of this
     pass.  */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */
gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}