/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "attribs.h"

struct object_size_info
{
  int object_size_type;
  unsigned char pass;
  bool changed;
  bitmap visited, reexamine;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
                              const_tree, int, unsigned HOST_WIDE_INT *,
                              tree * = NULL, tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
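/* For instance, given
     struct V { char buf1[10]; int b; char buf2[10]; } var;
     char *p = &var.buf1[1];
   (the example used in the __builtin_object_size documentation), the
   whole-object answers (types 0 and 2) are sizeof (var) - 1 and the
   subobject answers (types 1 and 3) are sizeof (var.buf1) - 1; the
   upper and lower bounds only diverge once the pointer is no longer
   known exactly, e.g. after a PHI node.  */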
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
    offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
  else
    offset_limit = -1;
  offset_limit /= 2;
}


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      if (! integer_zerop (low_bound))
        t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}

/* Returns the size of the object designated by DECL considering its
   initializer if it either has one or if it would not affect its size,
   otherwise the size of the object without the initializer when MIN
   is true, else null.  An object's initializer affects the object's
   size if it's a struct type with a flexible array member.  */
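/* Illustrative example: for
     struct S { int i; char fam[]; } s = { 1, "abc" };
   DECL_SIZE_UNIT may or may not account for the initializer of the
   flexible array member FAM, so the size is instead derived from the
   byte position of FAM plus whatever component_ref_size reports for
   it.  */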

static tree
decl_init_size (tree decl, bool min)
{
  tree size = DECL_SIZE_UNIT (decl);
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    return size;

  tree last = last_field (type);
  if (!last)
    return size;

  tree last_type = TREE_TYPE (last);
  if (TREE_CODE (last_type) != ARRAY_TYPE
      || TYPE_SIZE (last_type))
    return size;

  /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
     of the initializer and sometimes doesn't.  */
  size = TYPE_SIZE_UNIT (type);
  tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
  tree compsize = component_ref_size (ref);
  if (!compsize)
    return min ? size : NULL_TREE;

  /* The size includes tail padding and initializer elements.  */
  tree pos = byte_position (last);
  size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
  return size;
}

/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type, unsigned HOST_WIDE_INT *psize,
                  tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                       object_size_type & ~1, &sz, pdecl, poff);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          offset_int mem_offset;
          if (mem_ref_offset (pt_var).is_constant (&mem_offset))
            {
              offset_int dsz = wi::sub (sz, mem_offset);
              if (wi::neg_p (dsz))
                sz = 0;
              else if (wi::fits_uhwi_p (dsz))
                sz = dsz.to_uhwi ();
              else
                sz = unknown[object_size_type];
            }
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      *pdecl = pt_var;
      pt_var_size = decl_init_size (pt_var, object_size_type & 2);
      if (!pt_var_size)
        return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
          || tree_to_uhwi (pt_var_size) >= offset_limit)
        return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return false;
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
          *poff = bytes;
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              *poff = size_binop (PLUS_EXPR, *poff, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles calls to functions declared with attribute alloc_size.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */
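/* Illustrative example: given
     void *my_malloc (size_t) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t, size_t) __attribute__ ((alloc_size (1, 2)));
   a call my_malloc (32) yields 32 and my_calloc (4, 8) yields 4 * 8,
   provided the referenced arguments are INTEGER_CSTs; otherwise
   unknown[object_size_type] is returned.  */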

static unsigned HOST_WIDE_INT
alloc_object_size (const gcall *call, int object_size_type)
{
  gcc_assert (is_gimple_call (call));

  tree calltype;
  if (tree callfn = gimple_call_fndecl (call))
    calltype = TREE_TYPE (callfn);
  else
    calltype = gimple_call_fntype (call);

  if (!calltype)
    return unknown[object_size_type];

  /* Set to positions of alloc_size arguments.  */
  int arg1 = -1, arg2 = -1;
  tree alloc_size = lookup_attribute ("alloc_size",
                                      TYPE_ATTRIBUTES (calltype));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  tree bytes = NULL_TREE;
  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
                        fold_convert (sizetype, gimple_call_arg (call, arg1)),
                        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}


/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */
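/* E.g. memcpy returns its first argument (ERF_RETURNS_ARG with index 0),
   so for p = memcpy (dst, src, n) the object size of P is taken from
   DST.  */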

static tree
pass_through_call (const gcall *call)
{
  unsigned rf = gimple_call_return_flags (call);
  if (rf & ERF_RETURNS_ARG)
    {
      unsigned argnum = rf & ERF_RETURN_ARG_MASK;
      if (argnum < gimple_call_num_args (call))
        return gimple_call_arg (call, argnum);
    }

  /* __builtin_assume_aligned is intentionally not marked RET1.  */
  if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
    return gimple_call_arg (call, 0);

  return NULL_TREE;
}


/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */
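/* Typical use (illustrative):
     unsigned HOST_WIDE_INT size;
     if (compute_builtin_object_size (ptr, object_size_type, &size))
       ... use SIZE as the bound ...
   PDECL and POFF may be left as NULL when the underlying declaration
   and offset are not needed.  */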

bool
compute_builtin_object_size (tree ptr, int object_size_type,
                             unsigned HOST_WIDE_INT *psize,
                             tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
        return false;

      /* When not optimizing, rather than failing, make a small effort
         to determine the object size without the full benefit of
         the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
        {
          tree_code code = gimple_assign_rhs_code (def);
          if (code == POINTER_PLUS_EXPR)
            {
              tree offset = gimple_assign_rhs2 (def);
              ptr = gimple_assign_rhs1 (def);

              if (tree_fits_shwi_p (offset)
                  && compute_builtin_object_size (ptr, object_size_type,
                                                  psize, pdecl, poff))
                {
                  /* Return zero when the offset is out of bounds.  */
                  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
                  *psize = off < *psize ? *psize - off : 0;
                  *poff = offset;
                  return true;
                }
            }
        }
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      if (num_ssa_names > object_sizes[object_size_type].length ())
        object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
        {
          fprintf (dump_file, "Computing %s %sobject size for ",
                   (object_size_type & 2) ? "minimum" : "maximum",
                   (object_size_type & 1) ? "sub" : "");
          print_generic_expr (dump_file, ptr, dump_flags);
          fprintf (dump_file, ":\n");
        }

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
         can be computed.  osi.reexamine bitmap at the end will
         contain what variables were found in dependency cycles
         and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
         that need reexamination, until no object sizes are
         increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
        {
          bitmap reexamine = BITMAP_ALLOC (NULL);

          /* If looking for minimum instead of maximum object size,
             detect cases where a pointer is increased in a loop.
             Although even without this detection pass 2 would eventually
             terminate, it could take a long time.  If a pointer is
             increasing this way, we need to assume 0 object size.
             E.g. p = &buf[0]; while (cond) p = p + 4;  */
          if (object_size_type & 2)
            {
              osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
              osi.stack = XNEWVEC (unsigned int, num_ssa_names);
              osi.tos = osi.stack;
              osi.pass = 1;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  check_for_plus_in_loops (&osi, ssa_name (i));

              free (osi.depths);
              osi.depths = NULL;
              free (osi.stack);
              osi.stack = NULL;
              osi.tos = NULL;
            }

          do
            {
              osi.pass = 2;
              osi.changed = false;
              /* collect_object_sizes_for is changing
                 osi.reexamine bitmap, so iterate over a copy.  */
              bitmap_copy (reexamine, osi.reexamine);
              EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                if (bitmap_bit_p (osi.reexamine, i))
                  {
                    collect_object_sizes_for (&osi, ssa_name (i));
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Reexamining ");
                        print_generic_expr (dump_file, ssa_name (i),
                                            dump_flags);
                        fprintf (dump_file, "\n");
                      }
                  }
            }
          while (osi.changed);

          BITMAP_FREE (reexamine);
        }
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
        bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
        {
          EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
            if (object_sizes[object_size_type][i]
                != unknown[object_size_type])
              {
                print_generic_expr (dump_file, ssa_name (i),
                                    dump_flags);
                fprintf (dump_file,
                         ": %s %sobject size "
                         HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                         (object_size_type & 2) ? "minimum" : "maximum",
                         (object_size_type & 1) ? "sub" : "",
                         object_sizes[object_size_type][i]);
              }
        }

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    addr_object_size (osi, value, object_size_type, &bytes);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  */
          addr_object_size (osi, op0, object_size_type, &bytes);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return reexamine;

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */
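/* Illustrative example for the maximum object size (bit 2 of
   OSI->object_size_type clear):
     char buf[16];
     p_1 = &buf[4];           size of p_1 is 12
     p_2 = p_1 + 6;           size of p_2 is 12 - 6 = 6
     p_3 = PHI <p_1, p_2>;    size of p_3 is MAX (12, 6) = 12  */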

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, call_stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
        expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
        /* Uninitialized SSA names point nowhere.  */
        object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        gcall *call_stmt = as_a <gcall *> (stmt);
        tree arg = pass_through_call (call_stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (computed[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].safe_grow (num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type].release ();
      BITMAP_FREE (computed[object_size_type]);
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

namespace {

const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes

/* Dummy valueize function.  */

static tree
do_valueize (tree t)
{
  return t;
}

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree result;
          gimple *call = gsi_stmt (i);
          if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
            continue;

          init_object_sizes ();

          /* If insert_min_max_p, only attempt to fold
             __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
             and rather than folding the builtin to the constant if any,
             create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
             call result and the computed constant.  */
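          /* Illustrative example: a __builtin_object_size (x_1, 1) call
             with lhs_2 keeps the call but redirects its result to a fresh
             temporary and appends
               lhs_2 = MIN_EXPR <tem_3, computed_constant>;
             (MAX_EXPR for __builtin_object_size (x_1, 3)).  */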
          if (insert_min_max_p)
            {
              tree ost = gimple_call_arg (call, 1);
              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
                  tree ptr = gimple_call_arg (call, 0);
                  tree lhs = gimple_call_lhs (call);
                  if ((object_size_type == 1 || object_size_type == 3)
                      && (TREE_CODE (ptr) == ADDR_EXPR
                          || TREE_CODE (ptr) == SSA_NAME)
                      && lhs)
                    {
                      tree type = TREE_TYPE (lhs);
                      unsigned HOST_WIDE_INT bytes;
                      if (compute_builtin_object_size (ptr, object_size_type,
                                                       &bytes)
                          && wi::fits_to_tree_p (bytes, type))
                        {
                          tree tem = make_ssa_name (type);
                          gimple_call_set_lhs (call, tem);
                          enum tree_code code
                            = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
                          tree cst = build_int_cstu (type, bytes);
                          gimple *g
                            = gimple_build_assign (lhs, code, tem, cst);
                          gsi_insert_after (&i, g, GSI_NEW_STMT);
                          update_stmt (call);
                        }
                    }
                }
              continue;
            }

          tree lhs = gimple_call_lhs (call);
          if (!lhs)
            continue;

          result = gimple_fold_stmt_to_constant (call, do_valueize);
          if (!result)
            {
              tree ost = gimple_call_arg (call, 1);

              if (tree_fits_uhwi_p (ost))
                {
                  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

                  if (object_size_type < 2)
                    result = fold_convert (size_type_node,
                                           integer_minus_one_node);
                  else if (object_size_type < 4)
                    result = build_zero_cst (size_type_node);
                }

              if (!result)
                continue;
            }

          gcc_assert (TREE_CODE (result) == INTEGER_CST);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, result);
              fprintf (dump_file, "\n");
            }

          /* Propagate into all uses and fold those stmts.  */
          if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            replace_uses_by (lhs, result);
          else
            replace_call_with_value (&i, result);
        }
    }

  fini_object_sizes ();
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}