/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
10a0d495 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5 27#include "tree-pass.h"
c7131fb2 28#include "ssa.h"
957060b5 29#include "gimple-pretty-print.h"
40e23961 30#include "fold-const.h"
d8a2d370 31#include "tree-object-size.h"
2fb9a547 32#include "gimple-fold.h"
5be5c238 33#include "gimple-iterator.h"
1e080ab4 34#include "tree-cfg.h"
314e6352
ML
35#include "stringpool.h"
36#include "attribs.h"
10a0d495
JJ
37
struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *,
                              tree * = NULL, tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is the upper bound for the number of bytes till the end
   of the object.
   object_sizes[1] is the upper bound for the number of bytes till the end
   of the subobject (innermost array or field with address taken).
   object_sizes[2] is the lower bound for the number of bytes till the end
   of the object and object_sizes[3] is the lower bound for the subobject.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];
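
/* Illustrative example (not from the original sources) of the four modes;
   a minimal sketch assuming the address computation is visible to this pass:

     struct A { char buf[6]; char pad[4]; } a;
     char *p = &a.buf[2];

     __builtin_object_size (p, 0) == 8   maximum, whole object
     __builtin_object_size (p, 1) == 4   maximum, subobject a.buf
     __builtin_object_size (p, 2) == 8   minimum, whole object
     __builtin_object_size (p, 3) == 4   minimum, subobject a.buf  */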

/* Bitmaps recording for which SSA name versions the object sizes have
   already been computed.  */
static bitmap computed[4];

/* Maximum value of an offset we consider to be an addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
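
/* Illustrative example (not from the original sources): given

     struct A { int i; char buf[8]; } a;

   and the reference a.buf[3] (the operand of the ADDR_EXPR in &a.buf[3]),
   compute_object_offset walks the ARRAY_REF and the COMPONENT_REF and
   returns offsetof (struct A, buf) + 3, i.e. 7 on a target with 4-byte
   int.  */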

/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */

tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}
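
/* Illustrative example (not from the original sources): for

     struct S { int n; char data[]; };
     static struct S s = { 3, "abc" };

   the flexible array member makes the initializer affect the size, so the
   value returned for s is byte_position (data) plus the size of the
   initialized elements, rather than just sizeof (struct S).  */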

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   Store the result in *PSIZE and return true on success; return false
   if the size cannot be determined.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize,
                  tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz, pdecl, poff);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              if (*poff)
                *poff = wide_int_to_tree (ptrdiff_type_node,
                                          mem_offset + wi::to_offset (*poff));
              else
                *poff = wide_int_to_tree (ptrdiff_type_node, mem_offset);
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      *pdecl = pt_var;
      pt_var_size = decl_init_size (pt_var, object_size_type & 2);
      if (!pt_var_size)
        return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
          || tree_to_uhwi (pt_var_size) >= offset_limit)
        return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          *poff = bytes;
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              *poff = size_binop (PLUS_EXPR, *poff, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    {
      bytes = pt_var_size;
      if (!*poff)
        *poff = size_zero_node;
    }

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
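
/* Illustrative note (not from the original sources): for the common idiom

     struct T { int i; char c[1]; } *p;
     ... &p->c ...

   c is the last field, so the subobject modes (1 and 3) deliberately do not
   bound the size by sizeof (p->c) in the code above, because such structs
   are often used in place of a flexible array member.  */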


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (arg1 < 0 || arg1 >= (int) gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int) gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
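
/* Illustrative example (not from the original sources): with a declaration
   such as

     void *my_alloc (size_t n, size_t sz) __attribute__ ((alloc_size (1, 2)));

   a call my_alloc (10, 4) with both arguments constant yields an object size
   of 40 bytes here; the same mechanism covers the malloc and calloc built-in
   declarations via their alloc_size attributes.  */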


/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}
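
/* Illustrative note (not from the original sources): built-ins such as
   memcpy return their first argument, so for

     char buf[16];
     char *p = (char *) memcpy (buf, src, 8);

   the object size of p is taken from buf (16 bytes) rather than being
   treated as unknown.  */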


/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize,
                             tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize, pdecl, poff))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  *poff = offset;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
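
/* Illustrative example (not from the original sources): for

     char buf[10];
     char *p = cond ? &buf[2] : &buf[5];

   the first pass merges the two PHI arguments, so compute_builtin_object_size
   reports 8 for the maximum (mode 0) and 5 for the minimum (mode 2); SSA
   names involved in dependency cycles are instead queued in osi.reexamine
   and settled by the later passes above.  */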

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
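
/* Illustrative example (not from the original sources): for

     char buf[16];
     char *p = &buf[4];
     char *q = p + 4;

   the constant offset is subtracted from the size computed for p, so the
   maximum object size of q is 8; an offset larger than the remaining size
   clamps the result to 0 instead of wrapping around.  */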


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that it might be set to.  */
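
/* Illustrative example of the addition/subtraction heuristic (not from the
   original sources): on a 64-bit target offset_limit is roughly 2**63, so in

     char *q = p + 16;             treated as addition, size shrinks by 16
     char *r = p + (size_t) -16;   treated as subtraction, size is unknown

   the huge unsigned constant is taken to encode a negative offset.  */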

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}
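
/* Illustrative example (not from the original sources): for

     char *p = &buf[0];
     while (cond)
       p = p + 4;

   the cycle through the PHI node and the POINTER_PLUS_EXPR is detected here,
   and every SSA name on the cycle gets a minimum object size of 0, which is
   what __builtin_object_size (p, 2) then reports.  */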


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}