/* Source: gcc/tree-object-size.c, from the thirdparty/gcc.git mirror
   (git.ipfire.org); web-page navigation residue removed.  */
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "tree-flow.h"
29 #include "tree-pass.h"
30 #include "tree-ssa-propagate.h"
31
/* Per-invocation state shared by the object-size propagation passes.  */
struct object_size_info
{
  /* Which __builtin_object_size mode is being computed: bit 0 set means
     subobject size, bit 1 set means minimum instead of maximum.  */
  int object_size_type;
  /* VISITED: SSA name versions already entered in pass 0.
     REEXAMINE: versions found in dependency cycles, needing later passes.  */
  bitmap visited, reexamine;
  /* Current pass: 0 (initial walk), 1 (loop detection) or 2 (fixpoint).  */
  int pass;
  /* Set when a reexamined size changed, forcing another fixpoint round.  */
  bool changed;
  /* DFS bookkeeping for check_for_plus_in_loops*: per-SSA-name depth,
     an explicit stack of visited versions, and its top-of-stack pointer.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
41
/* Sentinel returned when a size cannot be determined: the maximum modes
   (types 0 and 1) report (size_t) -1, the minimum modes (2 and 3) 0.  */
static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
						const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each array is indexed by SSA name version.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps what object sizes have been computed already, one per mode,
   indexed by SSA name version.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition; offsets at or above
   this limit are treated as pointer subtraction instead.  */
static unsigned HOST_WIDE_INT offset_limit;
74
75
76 /* Initialize OFFSET_LIMIT variable. */
77 static void
78 init_offset_limit (void)
79 {
80 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
81 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
82 else
83 offset_limit = -1;
84 offset_limit /= 2;
85 }
86
87
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  EXPR is walked from the outside in; each handled
   component contributes its offset relative to its base.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Field offset = byte offset plus the bit offset scaled to bytes.  */
      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; recurse into the operand.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part sits one element past the real part.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* Negative constant indexes are folded as a subtraction of the
	 negated index so sizetype arithmetic stays unsigned.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
152
153
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  OSI may be NULL when
   called outside the SSA propagation engine.  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the base object PT_VAR.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* No propagation state available (or subobject mode): compute
	     the whole-object size of the base pointer directly.  */
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  /* Reuse (or compute on demand) the propagated size of the
	     SSA name the MEM_REF is based on.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset; clamp at zero if the offset
	     points past the end, give up if the result overflows.  */
	  addr_wide_int dsz = addr_wide_int (sz) - mem_ref_offset (pt_var);
	  if (dsz.neg_p ())
	    sz = 0;
	  else if (dsz.fits_uhwi_p ())
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && (unsigned HOST_WIDE_INT)
	      tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && (unsigned HOST_WIDE_INT)
	      tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject mode: find the innermost accessed array or field
	     VAR whose remaining size should be reported.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the whole object when the subobject type has no
	     usable constant size or is larger than the whole object.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  V is walked toward PT_VAR;
		 reaching NULL_TREE means the subobject size is usable,
		 reaching PT_VAR means fall back to the whole object.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			/* An in-bounds constant index cannot be a
			   trailing-array idiom; subobject size is fine.  */
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip union members; their layout cannot prove the
		       field is last.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Check whether any FIELD_DECL follows this one;
			   if so the field is not last and the subobject
			   size can be used.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining bytes = size of VAR minus the offset of PTR in it,
	 clamped at zero when the offset is provably past the end.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* For MEM_REF bases also cap by the remaining bytes in the whole
	 object, taking the smaller of the two bounds.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
375
376
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  /* Zero-based indexes of the size arguments; -1 means not present.
     When both are set the allocation is ARG1 * ARG2 bytes (calloc).  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  /* Honor a user-supplied alloc_size attribute; its values are
     1-based argument positions.  */
  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = tree_to_hwi (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
	arg2 = tree_to_hwi (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  /* Built-in allocators have known argument positions.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
	arg2 = 1;
	/* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
	arg1 = 0;
	/* fall through to default break */
      default:
	break;
      }

  /* Only constant size arguments produce a usable size.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
438
439
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  All the handled builtins (memcpy, strcpy and
   friends, and __builtin_assume_aligned) return their first argument.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
	break;
      default:
	break;
      }

  return NULL_TREE;
}
478
479
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  Drives up to three passes
   over the SSA use-def chains; results are cached in object_sizes[] and
   marked done in computed[].  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Lazily initialized; zero means init_offset_limit hasn't run yet.  */
  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	{
	  struct object_size_info osi;
	  bitmap_iterator bi;
	  unsigned int i;

	  if (dump_file)
	    {
	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");
	    }

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;
	  osi.depths = NULL;
	  osi.stack = NULL;
	  osi.tos = NULL;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	  osi.pass = 0;
	  osi.changed = false;
	  collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	    {
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		{
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
		  osi.tos = osi.stack;
		  osi.pass = 1;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

		  free (osi.depths);
		  osi.depths = NULL;
		  free (osi.stack);
		  osi.stack = NULL;
		  osi.tos = NULL;
		}

	      /* Iterate to a fixpoint; osi.changed is set whenever some
		 reexamined size moved.  */
	      do
		{
		  osi.pass = 2;
		  osi.changed = false;
		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      {
			collect_object_sizes_for (&osi, ssa_name (i));
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Reexamining ");
			    print_generic_expr (dump_file, ssa_name (i),
						dump_flags);
			    fprintf (dump_file, "\n");
			  }
		      }
		}
	      while (osi.changed);

	      BITMAP_FREE (reexamine);
	    }
	  /* Whatever is still marked for reexamination has converged;
	     record it as computed.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	  if (dump_file)
	    {
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		  {
		    print_generic_expr (dump_file, ssa_name (i),
					dump_flags);
		    fprintf (dump_file,
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);
		  }
	    }

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);
	}

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
615
616 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
617
618 static void
619 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
620 {
621 int object_size_type = osi->object_size_type;
622 unsigned int varno = SSA_NAME_VERSION (ptr);
623 unsigned HOST_WIDE_INT bytes;
624
625 gcc_assert (object_sizes[object_size_type][varno]
626 != unknown[object_size_type]);
627 gcc_assert (osi->pass == 0);
628
629 if (TREE_CODE (value) == WITH_SIZE_EXPR)
630 value = TREE_OPERAND (value, 0);
631
632 /* Pointer variables should have been handled by merge_object_sizes. */
633 gcc_assert (TREE_CODE (value) != SSA_NAME
634 || !POINTER_TYPE_P (TREE_TYPE (value)));
635
636 if (TREE_CODE (value) == ADDR_EXPR)
637 bytes = addr_object_size (osi, value, object_size_type);
638 else
639 bytes = unknown[object_size_type];
640
641 if ((object_size_type & 2) == 0)
642 {
643 if (object_sizes[object_size_type][varno] < bytes)
644 object_sizes[object_size_type][varno] = bytes;
645 }
646 else
647 {
648 if (object_sizes[object_size_type][varno] > bytes)
649 object_sizes[object_size_type][varno] = bytes;
650 }
651 }
652
653
654 /* Compute object_sizes for PTR, defined to the result of a call. */
655
656 static void
657 call_object_size (struct object_size_info *osi, tree ptr, gimple call)
658 {
659 int object_size_type = osi->object_size_type;
660 unsigned int varno = SSA_NAME_VERSION (ptr);
661 unsigned HOST_WIDE_INT bytes;
662
663 gcc_assert (is_gimple_call (call));
664
665 gcc_assert (object_sizes[object_size_type][varno]
666 != unknown[object_size_type]);
667 gcc_assert (osi->pass == 0);
668
669 bytes = alloc_object_size (call, object_size_type);
670
671 if ((object_size_type & 2) == 0)
672 {
673 if (object_sizes[object_size_type][varno] < bytes)
674 object_sizes[object_size_type][varno] = bytes;
675 }
676 else
677 {
678 if (object_sizes[object_size_type][varno] > bytes)
679 object_sizes[object_size_type][varno] = bytes;
680 }
681 }
682
683
684 /* Compute object_sizes for PTR, defined to an unknown value. */
685
686 static void
687 unknown_object_size (struct object_size_info *osi, tree ptr)
688 {
689 int object_size_type = osi->object_size_type;
690 unsigned int varno = SSA_NAME_VERSION (ptr);
691 unsigned HOST_WIDE_INT bytes;
692
693 gcc_assert (object_sizes[object_size_type][varno]
694 != unknown[object_size_type]);
695 gcc_assert (osi->pass == 0);
696
697 bytes = unknown[object_size_type];
698
699 if ((object_size_type & 2) == 0)
700 {
701 if (object_sizes[object_size_type][varno] < bytes)
702 object_sizes[object_size_type][varno] = bytes;
703 }
704 else
705 {
706 if (object_sizes[object_size_type][varno] > bytes)
707 object_sizes[object_size_type][varno] = bytes;
708 }
709 }
710
711
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG is still
   in a dependency cycle).  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Already saturated at unknown: nothing more to merge.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or past the limit are treated as pointer subtraction,
     making the size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Maximum modes keep the largest size, minimum modes the smallest;
     note a change so the fixpoint loop iterates again.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
757
758
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  An ADDR_EXPR of a MEM_REF is handled the
   same way, with the MEM_REF base and offset acting as the operands.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* &MEM_REF[ptr + off] is equivalent to ptr p+ off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Delegate to the merge path, which also handles cycles.  */
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  Subtract the offset from its
	     size, clamping at zero; offsets past the limit count as
	     subtraction and yield unknown.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Maximum modes keep the largest size, minimum modes the smallest.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
829
830
831 /* Compute object_sizes for VAR, defined at STMT, which is
832 a COND_EXPR. Return true if the object size might need reexamination
833 later. */
834
835 static bool
836 cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
837 {
838 tree then_, else_;
839 int object_size_type = osi->object_size_type;
840 unsigned int varno = SSA_NAME_VERSION (var);
841 bool reexamine = false;
842
843 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
844
845 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
846 return false;
847
848 then_ = gimple_assign_rhs2 (stmt);
849 else_ = gimple_assign_rhs3 (stmt);
850
851 if (TREE_CODE (then_) == SSA_NAME)
852 reexamine |= merge_object_sizes (osi, var, then_, 0);
853 else
854 expr_object_size (osi, var, then_);
855
856 if (TREE_CODE (else_) == SSA_NAME)
857 reexamine |= merge_object_sizes (osi, var, else_, 0);
858 else
859 expr_object_size (osi, var, else_);
860
861 return reexamine;
862 }
863
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  /* Already finalized in an earlier query.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the identity of the merge operation
	     (0 for maximum modes, -1 for minimum modes).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	/* memcpy-like builtins return their first argument; treat them
	   as pass-through, everything else as an allocation call.  */
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge every PHI argument; stop early once saturated.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Finalize unless a cycle dependency remains and the size is still
     potentially improvable.  */
  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1023
1024
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts the number of non-zero pointer
   additions seen on the current DFS path; revisiting a variable at a
   different depth proves a pointer-increasing cycle.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Everything on the
	     stack down to (and including) VAR participates in it; force
	     their minimum object sizes to 0 and mark them computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  /* Push VAR and recurse into its defining statement's operands.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A non-zero constant addition deepens the path.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	tree arg = pass_through_call (stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR on the way back out of the DFS.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1122
1123
1124 /* Check if some pointer we are computing object size of is being increased
1125 within a loop. If yes, assume all the SSA variables participating in
1126 that loop have minimum object sizes 0. */
1127
1128 static void
1129 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1130 {
1131 gimple stmt = SSA_NAME_DEF_STMT (var);
1132
1133 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1134 and looked for a POINTER_PLUS_EXPR in the pass-through
1135 argument, if any. In GIMPLE, however, such an expression
1136 is not a valid call operand. */
1137
1138 if (is_gimple_assign (stmt)
1139 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1140 {
1141 tree basevar = gimple_assign_rhs1 (stmt);
1142 tree cst = gimple_assign_rhs2 (stmt);
1143
1144 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1145
1146 if (integer_zerop (cst))
1147 return;
1148
1149 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1150 *osi->tos++ = SSA_NAME_VERSION (basevar);
1151 check_for_plus_in_loops_1 (osi, var, 2);
1152 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1153 osi->tos--;
1154 }
1155 }
1156
1157
1158 /* Initialize data structures for the object size computation. */
1159
1160 void
1161 init_object_sizes (void)
1162 {
1163 int object_size_type;
1164
1165 if (object_sizes[0])
1166 return;
1167
1168 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1169 {
1170 object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
1171 computed[object_size_type] = BITMAP_ALLOC (NULL);
1172 }
1173
1174 init_offset_limit ();
1175 }
1176
1177
1178 /* Destroy data structures after the object size computation. */
1179
1180 void
1181 fini_object_sizes (void)
1182 {
1183 int object_size_type;
1184
1185 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1186 {
1187 free (object_sizes[object_size_type]);
1188 BITMAP_FREE (computed[object_size_type]);
1189 object_sizes[object_size_type] = NULL;
1190 }
1191 }
1192
1193
1194 /* Simple pass to optimize all __builtin_object_size () builtins. */
1195
1196 static unsigned int
1197 compute_object_sizes (void)
1198 {
1199 basic_block bb;
1200 FOR_EACH_BB (bb)
1201 {
1202 gimple_stmt_iterator i;
1203 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1204 {
1205 tree callee, result;
1206 gimple call = gsi_stmt (i);
1207
1208 if (gimple_code (call) != GIMPLE_CALL)
1209 continue;
1210
1211 callee = gimple_call_fndecl (call);
1212 if (!callee
1213 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1214 || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
1215 continue;
1216
1217 init_object_sizes ();
1218 result = fold_call_stmt (call, false);
1219 if (!result)
1220 {
1221 if (gimple_call_num_args (call) == 2
1222 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
1223 {
1224 tree ost = gimple_call_arg (call, 1);
1225
1226 if (tree_fits_uhwi_p (ost))
1227 {
1228 unsigned HOST_WIDE_INT object_size_type
1229 = tree_to_uhwi (ost);
1230
1231 if (object_size_type < 2)
1232 result = fold_convert (size_type_node,
1233 integer_minus_one_node);
1234 else if (object_size_type < 4)
1235 result = build_zero_cst (size_type_node);
1236 }
1237 }
1238
1239 if (!result)
1240 continue;
1241 }
1242
1243 if (dump_file && (dump_flags & TDF_DETAILS))
1244 {
1245 fprintf (dump_file, "Simplified\n ");
1246 print_gimple_stmt (dump_file, call, 0, dump_flags);
1247 }
1248
1249 if (!update_call_from_tree (&i, result))
1250 gcc_unreachable ();
1251
1252 if (dump_file && (dump_flags & TDF_DETAILS))
1253 {
1254 fprintf (dump_file, "to\n ");
1255 print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
1256 fprintf (dump_file, "\n");
1257 }
1258 }
1259 }
1260
1261 fini_object_sizes ();
1262 return 0;
1263 }
1264
1265 namespace {
1266
/* Descriptor for the object-size lowering pass ("objsz"): a simple
   GIMPLE pass requiring CFG and SSA form, run via compute_object_sizes.  */

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};
1281
1282 class pass_object_sizes : public gimple_opt_pass
1283 {
1284 public:
1285 pass_object_sizes(gcc::context *ctxt)
1286 : gimple_opt_pass(pass_data_object_sizes, ctxt)
1287 {}
1288
1289 /* opt_pass methods: */
1290 opt_pass * clone () { return new pass_object_sizes (ctxt_); }
1291 unsigned int execute () { return compute_object_sizes (); }
1292
1293 }; // class pass_object_sizes
1294
1295 } // anon namespace
1296
1297 gimple_opt_pass *
1298 make_pass_object_sizes (gcc::context *ctxt)
1299 {
1300 return new pass_object_sizes (ctxt);
1301 }