]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-object-size.c
- Fix comment typos that I'd introduced.
[thirdparty/gcc.git] / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-ssa.h"
29 #include "tree-pass.h"
30 #include "tree-ssa-propagate.h"
31
/* State shared by one compute_builtin_object_size computation.  */
struct object_size_info
{
  /* The second argument of __builtin_object_size (0-3).  */
  int object_size_type;
  /* SSA versions already visited / versions found in dependency cycles
     that need to be recomputed by a later pass.  */
  bitmap visited, reexamine;
  /* Which pass is running: 0 = initial UD-chain walk, 1 = loop-increment
     detection, 2 = fixed-point reexamination.  */
  int pass;
  /* Set by pass 2 whenever some object size was changed; the pass
     iterates until this stays false.  */
  bool changed;
  /* Pass-1 only: per-SSA-version DFS depth, used by
     check_for_plus_in_loops_1 to spot pointer increments in loops.  */
  unsigned int *depths;
  /* Pass-1 only: explicit DFS stack of SSA versions, and its top.  */
  unsigned int *stack, *tos;
};
41
/* Value returned when an object size cannot be determined: all-ones
   (i.e. "maximum") for the upper-bound types 0 and 1, zero for the
   lower-bound types 2 and 3.  */
static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
43
44 static tree compute_object_offset (const_tree, const_tree);
45 static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
46 const_tree, int);
47 static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
48 static tree pass_through_call (const_gimple);
49 static void collect_object_sizes_for (struct object_size_info *, tree);
50 static void expr_object_size (struct object_size_info *, tree, tree);
51 static bool merge_object_sizes (struct object_size_info *, tree, tree,
52 unsigned HOST_WIDE_INT);
53 static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
54 static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
55 static unsigned int compute_object_sizes (void);
56 static void init_offset_limit (void);
57 static void check_for_plus_in_loops (struct object_size_info *, tree);
58 static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
59 unsigned int);
60
61 /* object_sizes[0] is upper bound for number of bytes till the end of
62 the object.
63 object_sizes[1] is upper bound for number of bytes till the end of
64 the subobject (innermost array or field with address taken).
65 object_sizes[2] is lower bound for number of bytes till the end of
66 the object and object_sizes[3] lower bound for subobject. */
67 static unsigned HOST_WIDE_INT *object_sizes[4];
68
69 /* Bitmaps what object sizes have been computed already. */
70 static bitmap computed[4];
71
72 /* Maximum value of offset we consider to be addition. */
73 static unsigned HOST_WIDE_INT offset_limit;
74
75
76 /* Initialize OFFSET_LIMIT variable. */
77 static void
78 init_offset_limit (void)
79 {
80 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
81 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
82 else
83 offset_limit = -1;
84 offset_limit /= 2;
85 }
86
87
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  EXPR must be a handled_component_p chain (or a
   MEM_REF of an ADDR_EXPR) whose innermost base is VAR.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  /* PLUS_EXPR unless a negative ARRAY_REF index flips it below.  */
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the field's byte offset.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not move the address; recurse on the operand.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* Index times element size; negative constant indices are
         negated and subtracted instead so the sizetype arithmetic
         stays unsigned.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      /* &MEM_REF[&var + off] -- the offset is the MEM_REF's constant.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
152
153
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].
   OSI may be NULL when called outside the propagation machinery
   (from compute_builtin_object_size directly).  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* Base is *(p + off): derive the size from the size of P.
         Subobject sizes (type & 1) can't be propagated through an
         SSA pointer, so fall back to the whole-object variant.  */
      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          /* Within a propagation: reuse (or first compute) the size
             recorded for the SSA pointer.  */
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      /* Subtract the MEM_REF's constant offset, saturating at 0 and
         giving up if the result doesn't fit a HOST_WIDE_INT.  */
      if (sz != unknown[object_size_type])
        {
          offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
          if (wi::neg_p (dsz))
            sz = 0;
          else if (wi::fits_uhwi_p (dsz))
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && (unsigned HOST_WIDE_INT)
              tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    /* Base is a declaration with a known, reasonable size.  */
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && (unsigned HOST_WIDE_INT)
              tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    /* Base is a string literal; use its type size.  */
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      /* PTR points into the middle of the base object.  */
      tree var;

      if (object_size_type & 1)
        {
          /* Subobject size requested: find the innermost accessed
             array or field with its address taken.  */
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          /* Fall back to the whole object when the subobject's type
             size is unusable or larger than the whole object.  */
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            /* Index strictly below the array bound --
                               not a trailing-array access; stop.  */
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    /* Skip through union members, which have no
                       meaningful "next field".  */
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        /* Any FIELD_DECL after this one means the array
                           is not the last field.  */
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              /* V == PT_VAR means the subobject is a trailing array:
                 use the whole-object size instead.  */
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus offset of PTR within it,
         saturating at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      /* For a MEM_REF base, also bound by the remaining bytes of the
         whole pointed-to object and take the minimum.  */
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
375
376
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indices of the size argument(s); -1 means absent.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* A user-supplied alloc_size attribute names the size argument(s)
     with 1-based positions; convert to 0-based.  */
  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = tree_to_hwi (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = tree_to_hwi (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Known allocation built-ins override the attribute.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        /* calloc (nmemb, size): product of both arguments.  */
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
        /* fall through to default -- nothing else to do.  */
      default:
        break;
      }

  /* Give up unless the size argument(s) exist and are constant.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
439
440
441 /* If object size is propagated from one of function's arguments directly
442 to its return value, return that argument for GIMPLE_CALL statement CALL.
443 Otherwise return NULL. */
444
445 static tree
446 pass_through_call (const_gimple call)
447 {
448 tree callee = gimple_call_fndecl (call);
449
450 if (callee
451 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
452 switch (DECL_FUNCTION_CODE (callee))
453 {
454 case BUILT_IN_MEMCPY:
455 case BUILT_IN_MEMMOVE:
456 case BUILT_IN_MEMSET:
457 case BUILT_IN_STRCPY:
458 case BUILT_IN_STRNCPY:
459 case BUILT_IN_STRCAT:
460 case BUILT_IN_STRNCAT:
461 case BUILT_IN_MEMCPY_CHK:
462 case BUILT_IN_MEMMOVE_CHK:
463 case BUILT_IN_MEMSET_CHK:
464 case BUILT_IN_STRCPY_CHK:
465 case BUILT_IN_STRNCPY_CHK:
466 case BUILT_IN_STPNCPY_CHK:
467 case BUILT_IN_STRCAT_CHK:
468 case BUILT_IN_STRNCAT_CHK:
469 case BUILT_IN_ASSUME_ALIGNED:
470 if (gimple_call_num_args (call) >= 1)
471 return gimple_call_arg (call, 0);
472 break;
473 default:
474 break;
475 }
476
477 return NULL_TREE;
478 }
479
480
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  Returns the size in bytes
   or unknown[object_size_type] if it cannot be determined.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Lazily initialized; 0 means not yet computed.  */
  if (! offset_limit)
    init_offset_limit ();

  /* Taking the address of something -- size is computed directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              /* Iterate pass 2 to a fixed point.  */
              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          /* Whatever is still marked for reexamination has converged;
             mark it as computed.  */
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
616
617 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
618
619 static void
620 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
621 {
622 int object_size_type = osi->object_size_type;
623 unsigned int varno = SSA_NAME_VERSION (ptr);
624 unsigned HOST_WIDE_INT bytes;
625
626 gcc_assert (object_sizes[object_size_type][varno]
627 != unknown[object_size_type]);
628 gcc_assert (osi->pass == 0);
629
630 if (TREE_CODE (value) == WITH_SIZE_EXPR)
631 value = TREE_OPERAND (value, 0);
632
633 /* Pointer variables should have been handled by merge_object_sizes. */
634 gcc_assert (TREE_CODE (value) != SSA_NAME
635 || !POINTER_TYPE_P (TREE_TYPE (value)));
636
637 if (TREE_CODE (value) == ADDR_EXPR)
638 bytes = addr_object_size (osi, value, object_size_type);
639 else
640 bytes = unknown[object_size_type];
641
642 if ((object_size_type & 2) == 0)
643 {
644 if (object_sizes[object_size_type][varno] < bytes)
645 object_sizes[object_size_type][varno] = bytes;
646 }
647 else
648 {
649 if (object_sizes[object_size_type][varno] > bytes)
650 object_sizes[object_size_type][varno] = bytes;
651 }
652 }
653
654
655 /* Compute object_sizes for PTR, defined to the result of a call. */
656
657 static void
658 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
659 {
660 int object_size_type = osi->object_size_type;
661 unsigned int varno = SSA_NAME_VERSION (ptr);
662 unsigned HOST_WIDE_INT bytes;
663
664 gcc_assert (is_gimple_call (call));
665
666 gcc_assert (object_sizes[object_size_type][varno]
667 != unknown[object_size_type]);
668 gcc_assert (osi->pass == 0);
669
670 bytes = alloc_object_size (call, object_size_type);
671
672 if ((object_size_type & 2) == 0)
673 {
674 if (object_sizes[object_size_type][varno] < bytes)
675 object_sizes[object_size_type][varno] = bytes;
676 }
677 else
678 {
679 if (object_sizes[object_size_type][varno] > bytes)
680 object_sizes[object_size_type][varno] = bytes;
681 }
682 }
683
684
685 /* Compute object_sizes for PTR, defined to an unknown value. */
686
687 static void
688 unknown_object_size (struct object_size_info *osi, tree ptr)
689 {
690 int object_size_type = osi->object_size_type;
691 unsigned int varno = SSA_NAME_VERSION (ptr);
692 unsigned HOST_WIDE_INT bytes;
693
694 gcc_assert (object_sizes[object_size_type][varno]
695 != unknown[object_size_type]);
696 gcc_assert (osi->pass == 0);
697
698 bytes = unknown[object_size_type];
699
700 if ((object_size_type & 2) == 0)
701 {
702 if (object_sizes[object_size_type][varno] < bytes)
703 object_sizes[object_size_type][varno] = bytes;
704 }
705 else
706 {
707 if (object_sizes[object_size_type][varno] > bytes)
708 object_sizes[object_size_type][varno] = bytes;
709 }
710 }
711
712
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* DEST is already at the unknown value -- nothing can change.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past the limit are treated as pointer subtraction,
     which makes the size unknown (see collect_object_sizes_for).  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Remaining bytes of ORIG after advancing by OFFSET, saturating
     at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      /* Upper bound: grow DEST's size if ORIG's is larger.  */
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      /* Lower bound: shrink DEST's size if ORIG's is smaller.  */
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  /* If ORIG is still in a dependency cycle, DEST needs another look.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
758
759
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr + off] is equivalent to ptr + off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  /* Already at the unknown value -- nothing can change.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  Subtract the offset from the
             object size, saturating at zero; offsets past OFFSET_LIMIT
             are treated as pointer subtraction (size unknown).  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  /* Record it: maximize for upper-bound types, minimize otherwise.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
830
831
832 /* Compute object_sizes for VAR, defined at STMT, which is
833 a COND_EXPR. Return true if the object size might need reexamination
834 later. */
835
836 static bool
837 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
838 {
839 tree then_, else_;
840 int object_size_type = osi->object_size_type;
841 unsigned int varno = SSA_NAME_VERSION (var);
842 bool reexamine = false;
843
844 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
845
846 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
847 return false;
848
849 then_ = gimple_assign_rhs2 (stmt);
850 else_ = gimple_assign_rhs3 (stmt);
851
852 if (TREE_CODE (then_) == SSA_NAME)
853 reexamine |= merge_object_sizes (osi, var, then_, 0);
854 else
855 expr_object_size (osi, var, then_);
856
857 if (TREE_CODE (else_) == SSA_NAME)
858 reexamine |= merge_object_sizes (osi, var, else_, 0);
859 else
860 expr_object_size (osi, var, else_);
861
862 return reexamine;
863 }
864
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already done in a previous computation.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          /* First visit: start from the identity element of the merge
             (0 for maximum, all-ones for minimum).  */
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        /* memcpy-like calls pass the size of their first argument
           through to their result.  */
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        /* Merge all incoming arguments; stop early once the size has
           degraded to unknown.  */
        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* A final (or unknown) size is recorded as computed; otherwise the
     variable stays in the reexamine set for the fixed-point pass.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
1024
1025
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts the number of non-zero pointer
   increments seen on the current DFS path; revisiting a variable at
   a different depth means the path contains a pointer addition in a
   cycle.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  Everything on
             the stack down to (and including) VAR gets minimum object
             size 0 and is marked as computed.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Not part of any dependency cycle -- nothing to check.  */
    return;

  /* Push VAR onto the DFS stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            /* Plain copy: depth is unchanged.  */
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            /* A non-zero increment bumps the depth.  */
            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        /* Follow every incoming SSA argument at the same depth.  */
        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the DFS stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1123
1124
1125 /* Check if some pointer we are computing object size of is being increased
1126 within a loop. If yes, assume all the SSA variables participating in
1127 that loop have minimum object sizes 0. */
1128
1129 static void
1130 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1131 {
1132 gimple stmt = SSA_NAME_DEF_STMT (var);
1133
1134 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1135 and looked for a POINTER_PLUS_EXPR in the pass-through
1136 argument, if any. In GIMPLE, however, such an expression
1137 is not a valid call operand. */
1138
1139 if (is_gimple_assign (stmt)
1140 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1141 {
1142 tree basevar = gimple_assign_rhs1 (stmt);
1143 tree cst = gimple_assign_rhs2 (stmt);
1144
1145 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1146
1147 if (integer_zerop (cst))
1148 return;
1149
1150 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1151 *osi->tos++ = SSA_NAME_VERSION (basevar);
1152 check_for_plus_in_loops_1 (osi, var, 2);
1153 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1154 osi->tos--;
1155 }
1156 }
1157
1158
1159 /* Initialize data structures for the object size computation. */
1160
1161 void
1162 init_object_sizes (void)
1163 {
1164 int object_size_type;
1165
1166 if (object_sizes[0])
1167 return;
1168
1169 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1170 {
1171 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1172 computed[object_size_type] = BITMAP_ALLOC (NULL);
1173 }
1174
1175 init_offset_limit ();
1176 }
1177
1178
1179 /* Destroy data structures after the object size computation. */
1180
1181 void
1182 fini_object_sizes (void)
1183 {
1184 int object_size_type;
1185
1186 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1187 {
1188 free (object_sizes[object_size_type]);
1189 BITMAP_FREE (computed[object_size_type]);
1190 object_sizes[object_size_type] = NULL;
1191 }
1192 }
1193
1194
1195 /* Simple pass to optimize all __builtin_object_size () builtins. */
1196
1197 static unsigned int
1198 compute_object_sizes (void)
1199 {
1200 basic_block bb;
1201 FOR_EACH_BB (bb)
1202 {
1203 gimple_stmt_iterator i;
1204 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1205 {
1206 tree callee, result;
1207 gimple call = gsi_stmt (i);
1208
1209 if (gimple_code (call) != GIMPLE_CALL)
1210 continue;
1211
1212 callee = gimple_call_fndecl (call);
1213 if (!callee
1214 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1215 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1216 continue;
1217
1218 init_object_sizes ();
1219 result = fold_call_stmt (call, false);
1220 if (!result)
1221 {
1222 if (gimple_call_num_args (call) == 2
1223 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1224 {
1225 tree ost = gimple_call_arg (call, 1);
1226
1227 if (tree_fits_uhwi_p (ost))
1228 {
1229 unsigned HOST_WIDE_INT object_size_type
1230 = tree_to_uhwi (ost);
1231
1232 if (object_size_type < 2)
1233 result = fold_convert (size_type_node,
1234 integer_minus_one_node);
1235 else if (object_size_type < 4)
1236 result = build_zero_cst (size_type_node);
1237 }
1238 }
1239
1240 if (!result)
1241 continue;
1242 }
1243
1244 if (dump_file && (dump_flags & TDF_DETAILS))
1245 {
1246 fprintf (dump_file, "Simplified\n ");
1247 print_gimple_stmt (dump_file, call, 0, dump_flags);
1248 }
1249
1250 if (!update_call_from_tree (&i, result))
1251 gcc_unreachable ();
1252
1253 if (dump_file && (dump_flags & TDF_DETAILS))
1254 {
1255 fprintf (dump_file, "to\n ");
1256 print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
1257 fprintf (dump_file, "\n");
1258 }
1259 }
1260 }
1261
1262 fini_object_sizes ();
1263 return 0;
1264 }
1265
namespace {

/* Pass descriptor for the "objsz" GIMPLE pass: runs unconditionally
   (no gate), requires CFG and SSA form, and verifies SSA when done.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

/* Pass object wrapping compute_object_sizes.  clone () allows the
   pass manager to schedule more than one instance of this pass.  */
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  unsigned int execute () { return compute_object_sizes (); }

}; // class pass_object_sizes

} // anon namespace
1297
1298 gimple_opt_pass *
1299 make_pass_object_sizes (gcc::context *ctxt)
1300 {
1301 return new pass_object_sizes (ctxt);
1302 }