/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound on the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound on the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound on the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
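
/* Worked example (illustrative only, not from the original source; assumes
   the layout shown):

     struct A { char buf[10]; char post[4]; } a;
     char *p = &a.buf[2];

   would give
     __builtin_object_size (p, 0) == 12   maximum, to the end of 'a'
     __builtin_object_size (p, 1) == 8    maximum, to the end of subobject 'buf'
     __builtin_object_size (p, 2) == 12   minimum, whole object
     __builtin_object_size (p, 3) == 8    minimum, subobject  */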

/* Bitmaps recording which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of an offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
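
/* Worked example (illustrative only, not from the original source):
   compute_object_offset recurses through the reference chain down to VAR
   and sums the byte offsets.  Given

     struct S { int i; char c[8]; } s;   (4-byte int assumed)

   the offset of s.c[3] within s folds to 4 + 3 * 1 == 7.  A negative
   constant array index is folded via MINUS_EXPR, and reference codes not
   handled above make the whole computation return error_mark_node.  */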


/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   Store the size into *PSIZE and return true if it could be determined;
   otherwise leave *PSIZE as unknown[object_size_type] and return false.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
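
/* Worked example (illustrative only, not from the original source):
   for PTR == &a.buf[2] with

     struct A { char buf[10]; char post[4]; } a;

   pt_var is the DECL 'a', pt_var_size is 14, and *psize becomes 12 for
   object_size_type 0/2 (whole object) or 8 for 1/3, where the subobject
   is the innermost addressed array 'buf' (assuming the layout above).
   For a PTR based on a MEM_REF, the size of the pointed-to SSA name is
   looked up (or computed) first and the MEM_REF offset subtracted.  */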


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
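
/* Worked example (illustrative only, not from the original source;
   'my_alloc' is a hypothetical function name):

     void *my_alloc (size_t n, size_t m) __attribute__ ((alloc_size (1, 2)));

   makes alloc_object_size return n * m for a call with constant arguments,
   e.g. 5 * 12 == 60 for my_alloc (5, 12); with non-constant arguments or
   without the attribute the result stays unknown[object_size_type].  */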


/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
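
/* Illustrative note (editor's sketch, not from the original source):
   built-ins such as memcpy are declared as returning their first argument,
   so for

     q = memcpy (p, src, n);

   pass_through_call returns 'p' and q inherits p's object size; the same
   happens for __builtin_assume_aligned, which is special-cased above.  */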


/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  OBJECT_SIZE_TYPE is the second argument
   to __builtin_object_size.  Return true on success and false
   when the object size could not be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type, psize))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
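
/* Usage note (illustrative only, not from the original source): a typical
   caller checks the boolean result before trusting *psize, since on
   failure *psize is left as unknown[object_size_type]:

     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, 0, &size))
       {
         ... size is a usable upper bound on the remaining bytes ...
       }  */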

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
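
/* Worked example (illustrative only, not from the original source):
   if ORIG has a known maximum size of 10 and OFFSET is 4, DEST is merged
   with 10 - 4 == 6; an OFFSET larger than ORIG's size clamps to 0, and an
   OFFSET of offset_limit or more marks DEST as unknown, since such a large
   constant is treated as a pointer subtraction rather than an addition.  */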


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
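
/* Worked example (illustrative only, not from the original source):
   for GIMPLE such as

     char buf[16];
     p_1 = &buf[0];
     p_2 = p_1 + 10;

   the size recorded for p_2 is 16 - 10 == 6, while p_3 = p_1 + 20 would
   clamp to 0 because the constant exceeds the remaining bytes.  The
   ADDR_EXPR-of-MEM_REF form &MEM[p_1 + 10] is handled the same way.  */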


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
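
/* Worked example (illustrative only, not from the original source):
   for p_3 = cond ? p_1 : p_2, where p_1 points into a 16-byte buffer and
   p_2 into an 8-byte one, the maximum object size of p_3 becomes 16 and
   the minimum object size becomes 8, following the merge direction chosen
   by object_size_type & 2 in merge_object_sizes/expr_object_size.  */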

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc, the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants are considered subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */

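/* Worked example (illustrative only, not from the original source):
   for SSA form such as

     p_1 = PHI <&buf[0](bb2), p_2(bb3)>
     p_2 = p_1 + 4;

   pass 0 records a provisional size for p_1, notices the cycle through
   p_2 and queues both names in osi->reexamine; pass 2 then iterates until
   the sizes stop growing (and pass 1, for minimum sizes, forces such
   increasing-pointer cycles straight to 0).  */
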
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
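
/* Worked example (illustrative only, not from the original source):
   for the loop  p = &buf[0]; while (cond) p = p + 4;  the SSA cycle

     p_1 = PHI <&buf[0], p_2>
     p_2 = p_1 + 4;

   is entered via p_2 above; the recursion reaches p_1 again at a different
   depth (the depth grows at each nonzero addition), so every name on the
   stack gets its minimum object size forced to 0.  */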


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
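          /* Illustrative sketch (not from the original source): with
             insert_min_max_p, a statement

               sz_5 = __builtin_object_size (p_2, 1);

             whose size computes to, say, 64 is rewritten roughly as

               tmp_7 = __builtin_object_size (p_2, 1);
               sz_5 = MIN_EXPR <tmp_7, 64>;

             so a later folding of tmp_7 is combined with the early
             estimate: the smaller upper bound for type 1 (MIN), the larger
             lower bound for type 3 (MAX).  */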
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          replace_uses_by (lhs, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}