/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "hard-reg-set.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "builtins.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps of which object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;

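/* For illustration (a sketch added for this edit; the struct, names and
   numbers below are made up and assume a fully visible object):

     struct A { char a[4]; char b[8]; } x;
     char *p = &x.a[1];

   then, roughly,
     __builtin_object_size (p, 0) == 11   maximum, whole object x
     __builtin_object_size (p, 1) == 3    maximum, enclosing subobject x.a
     __builtin_object_size (p, 2) == 11   minimum, whole object
     __builtin_object_size (p, 3) == 3    minimum, subobject
   i.e. bit 0 of the type selects the subobject bound and bit 1 selects
   minimum instead of maximum.  */
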
/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}


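/* Illustrative sketch (added for this edit; the struct and numbers are
   hypothetical): for

     struct S { int i; char c[8]; } s;

   walking the reference s.c[3] with VAR being s goes through the
   COMPONENT_REF case (adding the field offset of c) and the ARRAY_REF
   case (adding 3 * TYPE_SIZE_UNIT (char)), so the result is
   offsetof (struct S, c) + 3, e.g. 7 on typical targets where int is
   4 bytes and there is no padding.  */
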
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
          if (wi::neg_p (dsz))
            sz = 0;
          else if (wi::fits_uhwi_p (dsz))
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


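/* Illustrative sketch (added for this edit; the array and numbers are
   hypothetical): for

     char buf[16];

   addr_object_size on the ADDR_EXPR &buf[5] yields 16 - 5 = 11, while for
   a pointer formed past the end, e.g. &buf[20], the computed offset
   exceeds var_size and the result is clamped to 0 instead of wrapping
   around.  */
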
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


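/* Illustrative sketch (added for this edit; my_alloc below is a
   hypothetical function, the alloc_size attribute itself is the
   documented GCC attribute):

     void *my_alloc (int n, int sz) __attribute__ ((alloc_size (1, 2)));

     p = malloc (32);        alloc_object_size gives 32
     q = calloc (4, 8);      4 * 8 = 32
     r = my_alloc (10, 6);   arguments 1 and 2 multiplied, 60

   Non-constant size arguments make the result unknown[object_size_type].  */
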
/* If object size is propagated from one of the function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}


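/* Illustrative note (added for this edit): for a call such as

     q = memcpy (p, src, n);

   pass_through_call returns the first argument P, so the object size of Q
   is merged from P rather than being treated as an unknown call result.  */
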
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

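/* Sketch of why the reexamination loop above is needed (illustrative,
   written in rough SSA form with made-up names):

     p_1 = &buf[0];
     loop:
       p_2 = PHI <p_1, p_3>;
       p_3 = p_2 + 4;

   The PHI for p_2 depends on p_3, which depends back on p_2, so the first
   pass marks both for reexamination and the second pass iterates until
   the sizes stop changing; for minimum sizes, check_for_plus_in_loops
   instead forces the whole cycle to size 0 up front.  */
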
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

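/* Illustrative sketch (added for this edit; names and numbers made up):
   with

     char buf[10];
     p_1 = &buf[2];      recorded object size 8
     p_2 = p_1 + 3;

   merge_object_sizes (osi, p_2, p_1, 3) folds 8 - 3 = 5 into the size
   recorded for p_2 (taking the maximum or minimum as requested), while an
   OFFSET at or above offset_limit makes the result unknown instead.  */
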
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}

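/* Illustrative note (added for this edit, written schematically): the
   ADDR_EXPR case above covers statements of the form

     p_2 = &MEM_REF [p_1, 8];

   where pointer arithmetic has been folded into an address, and it is
   handled the same way as p_2 = p_1 + 8.  */
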
/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR whose second operand is a constant integer,
   object size is the object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants are considered subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, object size is the maximum of the object sizes of the
   variables that it might be set to.  */

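/* Illustration of the addition vs. subtraction heuristic above (a sketch
   added for this edit; assumes a typical 32-bit sizetype, for which
   offset_limit is 0x7fffffff):

     p_2 = p_1 + 4;            constant below offset_limit, treated as
                               addition: size shrinks by 4
     p_2 = p_1 + 0xfffffffc;   constant at or above offset_limit, i.e.
                               p_1 - 4 in disguise: size becomes unknown  */
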
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}

/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}

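/* Illustrative note (added for this edit): for the loop from the
   comment above,

     p = &buf[0];
     while (cond)
       p = p + 4;

   check_for_plus_in_loops starts a depth-first walk at the
   POINTER_PLUS_EXPR, and check_for_plus_in_loops_1 notices that the walk
   comes back to an SSA name at a different depth, so every name on that
   cycle gets minimum object size 0.  */
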
/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

static void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_object_sizes

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* In the first pass instance, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (first_pass_instance)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes
                        = compute_builtin_object_size (ptr, object_size_type);
                      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type
                                                             == 1 ? -1 : 0)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple g = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          result = fold_call_stmt (as_a <gcall *> (call), false);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result, 0);
              fprintf (dump_file, "\n");
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          /* Propagate into all uses and fold those stmts.  */
          gimple use_stmt;
          imm_use_iterator iter;
          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              use_operand_p use_p;
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, result);
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
              fold_stmt (&gsi);
              update_stmt (gsi_stmt (gsi));
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}