]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-object-size.c
expander: Optimize store_expr from STRING_CST [PR95052]
[thirdparty/gcc.git] / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "stringpool.h"
36 #include "attribs.h"
37
/* Per-invocation state shared by the object-size computation helpers.  */
struct object_size_info
{
  /* Second argument to __builtin_object_size: bit 0 requests subobject
     sizes, bit 1 requests minimum (instead of maximum) sizes.  */
  int object_size_type;
  /* Current phase: 0 collects sizes along UD chains, 1 detects pointer
     increments in loops, 2 reexamines names found in dependency cycles.  */
  unsigned char pass;
  /* Set when a reexamination changed a recorded size; the pass-2 loop
     iterates until this stays false.  */
  bool changed;
  /* SSA name versions already visited, and those that were found in a
     dependency cycle and need reexamination.  */
  bitmap visited, reexamine;
  /* Scratch storage for the loop-increment detection (pass 1 only);
     NULL otherwise.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
47
/* Value recorded when a size cannot be determined.  For the maximum
   queries (types 0 and 1) that is all-ones; for the minimum queries
   (types 2 and 3) it is zero.  */
static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};

static tree compute_object_offset (const_tree, const_tree);
static bool addr_object_size (struct object_size_info *,
			      const_tree, int, unsigned HOST_WIDE_INT *,
			      tree * = NULL, tree * = NULL);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
				       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA_NAME_VERSION.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;
86
87 /* Initialize OFFSET_LIMIT variable. */
88 static void
89 init_offset_limit (void)
90 {
91 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
92 offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
93 else
94 offset_limit = -1;
95 offset_limit /= 2;
96 }
97
98
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the byte position of the
	 field within it.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
				  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the offset; look through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      /* The imaginary part lies one element size into the complex
	 object.  */
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
	return base;

      t = TREE_OPERAND (expr, 1);
      tree low_bound, unit_size;
      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
      /* Bias the index by the array's lower bound.  */
      if (! integer_zerop (low_bound))
	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
      /* A negative constant index would be mangled by the conversion to
	 the unsigned sizetype below; negate it and flip the operation
	 to a subtraction instead.  */
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	{
	  code = MINUS_EXPR;
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
	}
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, unit_size, t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
168
169 /* Returns the size of the object designated by DECL considering its
170 initializer if it either has one or if it would not affect its size,
171 otherwise the size of the object without the initializer when MIN
172 is true, else null. An object's initializer affects the object's
173 size if it's a struct type with a flexible array member. */
174
175 static tree
176 decl_init_size (tree decl, bool min)
177 {
178 tree size = DECL_SIZE_UNIT (decl);
179 tree type = TREE_TYPE (decl);
180 if (TREE_CODE (type) != RECORD_TYPE)
181 return size;
182
183 tree last = last_field (type);
184 if (!last)
185 return size;
186
187 tree last_type = TREE_TYPE (last);
188 if (TREE_CODE (last_type) != ARRAY_TYPE
189 || TYPE_SIZE (last_type))
190 return size;
191
192 /* Use TYPE_SIZE_UNIT; DECL_SIZE_UNIT sometimes reflects the size
193 of the initializer and sometimes doesn't. */
194 size = TYPE_SIZE_UNIT (type);
195 tree ref = build3 (COMPONENT_REF, type, decl, last, NULL_TREE);
196 tree compsize = component_ref_size (ref);
197 if (!compsize)
198 return min ? size : NULL_TREE;
199
200 /* The size includes tail padding and initializer elements. */
201 tree pos = byte_position (last);
202 size = fold_build2 (PLUS_EXPR, TREE_TYPE (size), pos, compsize);
203 return size;
204 }
205
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   On success store the size in *PSIZE, the underlying decl (if known)
   in *PDECL and the remaining offset within it in *POFF, and return
   true.  If unknown, *PSIZE is left as unknown[object_size_type] and
   false is returned.  */

static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type, unsigned HOST_WIDE_INT *psize,
		  tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  /* Redirect uninterested callers at dummies so the code below can
     store through PDECL/POFF unconditionally.  */
  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  /* Strip handled components to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (!pt_var)
    return false;

  if (TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* Without OSI (or for subobject queries) compute the
	     whole-object size of the base pointer directly.  */
	  compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
				       object_size_type & ~1, &sz, pdecl, poff);
	}
      else
	{
	  /* Reuse (triggering if necessary) the SSA-name walk for the
	     base pointer.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      if (sz != unknown[object_size_type])
	{
	  /* Subtract the MEM_REF offset, saturating at zero and giving
	     up on non-constant or non-UHWI results.  */
	  offset_int mem_offset;
	  if (mem_ref_offset (pt_var).is_constant (&mem_offset))
	    {
	      offset_int dsz = wi::sub (sz, mem_offset);
	      if (wi::neg_p (dsz))
		sz = 0;
	      else if (wi::fits_uhwi_p (dsz))
		sz = dsz.to_uhwi ();
	      else
		sz = unknown[object_size_type];
	    }
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (DECL_P (pt_var))
    {
      *pdecl = pt_var;
      pt_var_size = decl_init_size (pt_var, object_size_type & 2);
      if (!pt_var_size)
	return false;
    }
  else if (TREE_CODE (pt_var) == STRING_CST)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return false;

  if (pt_var_size)
    {
      /* Validate the size determined above.  */
      if (!tree_fits_uhwi_p (pt_var_size)
	  || tree_to_uhwi (pt_var_size) >= offset_limit)
	return false;
    }

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Subobject query: find the innermost array or field whose
	     address is being taken.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the whole object when the subobject type has
	     no usable size or is larger than the whole object.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    /* Index provably within bounds: the array
			       cannot be acting as a trailing
			       flexible-array idiom here.  */
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Look through any union wrappers around the
		       accessed field.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is the field followed by another FIELD_DECL?
			   Only a last field can play the role of a
			   flexible array member.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;

			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    /* Again look through union wrappers.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return false;
      else
	var_size = pt_var_size;
      /* Remaining bytes from the accessed component to the end of VAR,
	 saturated at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	  *poff = bytes;
	}
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  /* Also clamp by the bytes remaining in the whole object.  */
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      *poff = size_binop (PLUS_EXPR, *poff, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return false;
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    {
      *psize = tree_to_uhwi (bytes);
      return true;
    }

  return false;
}
454
455
456 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
457 Handles calls to functions declared with attribute alloc_size.
458 OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
459 If unknown, return unknown[object_size_type]. */
460
461 static unsigned HOST_WIDE_INT
462 alloc_object_size (const gcall *call, int object_size_type)
463 {
464 gcc_assert (is_gimple_call (call));
465
466 tree calltype;
467 if (tree callfn = gimple_call_fndecl (call))
468 calltype = TREE_TYPE (callfn);
469 else
470 calltype = gimple_call_fntype (call);
471
472 if (!calltype)
473 return unknown[object_size_type];
474
475 /* Set to positions of alloc_size arguments. */
476 int arg1 = -1, arg2 = -1;
477 tree alloc_size = lookup_attribute ("alloc_size",
478 TYPE_ATTRIBUTES (calltype));
479 if (alloc_size && TREE_VALUE (alloc_size))
480 {
481 tree p = TREE_VALUE (alloc_size);
482
483 arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
484 if (TREE_CHAIN (p))
485 arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
486 }
487
488 if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
489 || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
490 || (arg2 >= 0
491 && (arg2 >= (int)gimple_call_num_args (call)
492 || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
493 return unknown[object_size_type];
494
495 tree bytes = NULL_TREE;
496 if (arg2 >= 0)
497 bytes = size_binop (MULT_EXPR,
498 fold_convert (sizetype, gimple_call_arg (call, arg1)),
499 fold_convert (sizetype, gimple_call_arg (call, arg2)));
500 else if (arg1 >= 0)
501 bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
502
503 if (bytes && tree_fits_uhwi_p (bytes))
504 return tree_to_uhwi (bytes);
505
506 return unknown[object_size_type];
507 }
508
509
510 /* If object size is propagated from one of function's arguments directly
511 to its return value, return that argument for GIMPLE_CALL statement CALL.
512 Otherwise return NULL. */
513
514 static tree
515 pass_through_call (const gcall *call)
516 {
517 unsigned rf = gimple_call_return_flags (call);
518 if (rf & ERF_RETURNS_ARG)
519 {
520 unsigned argnum = rf & ERF_RETURN_ARG_MASK;
521 if (argnum < gimple_call_num_args (call))
522 return gimple_call_arg (call, argnum);
523 }
524
525 /* __builtin_assume_aligned is intentionally not marked RET1. */
526 if (gimple_call_builtin_p (call, BUILT_IN_ASSUME_ALIGNED))
527 return gimple_call_arg (call, 0);
528
529 return NULL_TREE;
530 }
531
532
/* Compute __builtin_object_size value for PTR and set *PSIZE to
   the resulting value.  If the declared object is known and PDECL
   is nonnull, sets *PDECL to the object's DECL.  OBJECT_SIZE_TYPE
   is the second argument to __builtin_object_size.
   Returns true on success and false when the object size could not
   be determined.  */

bool
compute_builtin_object_size (tree ptr, int object_size_type,
			     unsigned HOST_WIDE_INT *psize,
			     tree *pdecl /* = NULL */, tree *poff /* = NULL */)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  /* Redirect uninterested callers at dummies so *PDECL/*POFF can be
     stored through unconditionally.  */
  tree dummy_decl, dummy_off = size_zero_node;
  if (!pdecl)
    pdecl = &dummy_decl;
  if (!poff)
    poff = &dummy_off;

  /* Set to unknown and overwrite just before returning if the size
     could be determined.  */
  *psize = unknown[object_size_type];

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type, psize, pdecl, poff);

  if (TREE_CODE (ptr) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr)))
    return false;

  /* computed[] being NULL means we are called outside the objsz pass
     (e.g. when not optimizing).  */
  if (computed[object_size_type] == NULL)
    {
      if (optimize || object_size_type & 1)
	return false;

      /* When not optimizing, rather than failing, make a small effort
	 to determine the object size without the full benefit of
	 the (costly) computation below.  */
      gimple *def = SSA_NAME_DEF_STMT (ptr);
      if (gimple_code (def) == GIMPLE_ASSIGN)
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == POINTER_PLUS_EXPR)
	    {
	      tree offset = gimple_assign_rhs2 (def);
	      ptr = gimple_assign_rhs1 (def);

	      if (tree_fits_shwi_p (offset)
		  && compute_builtin_object_size (ptr, object_size_type,
						  psize, pdecl, poff))
		{
		  /* Return zero when the offset is out of bounds.  */
		  unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
		  *psize = off < *psize ? *psize - off : 0;
		  *poff = offset;
		  return true;
		}
	    }
	}
      return false;
    }

  if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
    {
      struct object_size_info osi;
      bitmap_iterator bi;
      unsigned int i;

      /* Make sure the size table can be indexed by any SSA version.  */
      if (num_ssa_names > object_sizes[object_size_type].length ())
	object_sizes[object_size_type].safe_grow (num_ssa_names);
      if (dump_file)
	{
	  fprintf (dump_file, "Computing %s %sobject size for ",
		   (object_size_type & 2) ? "minimum" : "maximum",
		   (object_size_type & 1) ? "sub" : "");
	  print_generic_expr (dump_file, ptr, dump_flags);
	  fprintf (dump_file, ":\n");
	}

      osi.visited = BITMAP_ALLOC (NULL);
      osi.reexamine = BITMAP_ALLOC (NULL);
      osi.object_size_type = object_size_type;
      osi.depths = NULL;
      osi.stack = NULL;
      osi.tos = NULL;

      /* First pass: walk UD chains, compute object sizes that
	 can be computed.  osi.reexamine bitmap at the end will
	 contain what variables were found in dependency cycles
	 and therefore need to be reexamined.  */
      osi.pass = 0;
      osi.changed = false;
      collect_object_sizes_for (&osi, ptr);

      /* Second pass: keep recomputing object sizes of variables
	 that need reexamination, until no object sizes are
	 increased or all object sizes are computed.  */
      if (! bitmap_empty_p (osi.reexamine))
	{
	  bitmap reexamine = BITMAP_ALLOC (NULL);

	  /* If looking for minimum instead of maximum object size,
	     detect cases where a pointer is increased in a loop.
	     Although even without this detection pass 2 would eventually
	     terminate, it could take a long time.  If a pointer is
	     increasing this way, we need to assume 0 object size.
	     E.g. p = &buf[0]; while (cond) p = p + 4;  */
	  if (object_size_type & 2)
	    {
	      osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
	      osi.stack = XNEWVEC (unsigned int, num_ssa_names);
	      osi.tos = osi.stack;
	      osi.pass = 1;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  check_for_plus_in_loops (&osi, ssa_name (i));

	      free (osi.depths);
	      osi.depths = NULL;
	      free (osi.stack);
	      osi.stack = NULL;
	      osi.tos = NULL;
	    }

	  do
	    {
	      osi.pass = 2;
	      osi.changed = false;
	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		  {
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
					    dump_flags);
			fprintf (dump_file, "\n");
		      }
		  }
	    }
	  while (osi.changed);

	  BITMAP_FREE (reexamine);
	}
      /* Whatever remained marked for reexamination has reached a fixed
	 point; record those names as computed.  */
      EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	bitmap_set_bit (computed[object_size_type], i);

      /* Debugging dumps.  */
      if (dump_file)
	{
	  EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
	    if (object_sizes[object_size_type][i]
		!= unknown[object_size_type])
	      {
		print_generic_expr (dump_file, ssa_name (i),
				    dump_flags);
		fprintf (dump_file,
			 ": %s %sobject size "
			 HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			 (object_size_type & 2) ? "minimum" : "maximum",
			 (object_size_type & 1) ? "sub" : "",
			 object_sizes[object_size_type][i]);
	      }
	}

      BITMAP_FREE (osi.reexamine);
      BITMAP_FREE (osi.visited);
    }

  *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
  return *psize != unknown[object_size_type];
}
716
717 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
718
719 static void
720 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
721 {
722 int object_size_type = osi->object_size_type;
723 unsigned int varno = SSA_NAME_VERSION (ptr);
724 unsigned HOST_WIDE_INT bytes;
725
726 gcc_assert (object_sizes[object_size_type][varno]
727 != unknown[object_size_type]);
728 gcc_assert (osi->pass == 0);
729
730 if (TREE_CODE (value) == WITH_SIZE_EXPR)
731 value = TREE_OPERAND (value, 0);
732
733 /* Pointer variables should have been handled by merge_object_sizes. */
734 gcc_assert (TREE_CODE (value) != SSA_NAME
735 || !POINTER_TYPE_P (TREE_TYPE (value)));
736
737 if (TREE_CODE (value) == ADDR_EXPR)
738 addr_object_size (osi, value, object_size_type, &bytes);
739 else
740 bytes = unknown[object_size_type];
741
742 if ((object_size_type & 2) == 0)
743 {
744 if (object_sizes[object_size_type][varno] < bytes)
745 object_sizes[object_size_type][varno] = bytes;
746 }
747 else
748 {
749 if (object_sizes[object_size_type][varno] > bytes)
750 object_sizes[object_size_type][varno] = bytes;
751 }
752 }
753
754
755 /* Compute object_sizes for PTR, defined to the result of a call. */
756
757 static void
758 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
759 {
760 int object_size_type = osi->object_size_type;
761 unsigned int varno = SSA_NAME_VERSION (ptr);
762 unsigned HOST_WIDE_INT bytes;
763
764 gcc_assert (is_gimple_call (call));
765
766 gcc_assert (object_sizes[object_size_type][varno]
767 != unknown[object_size_type]);
768 gcc_assert (osi->pass == 0);
769
770 bytes = alloc_object_size (call, object_size_type);
771
772 if ((object_size_type & 2) == 0)
773 {
774 if (object_sizes[object_size_type][varno] < bytes)
775 object_sizes[object_size_type][varno] = bytes;
776 }
777 else
778 {
779 if (object_sizes[object_size_type][varno] > bytes)
780 object_sizes[object_size_type][varno] = bytes;
781 }
782 }
783
784
785 /* Compute object_sizes for PTR, defined to an unknown value. */
786
787 static void
788 unknown_object_size (struct object_size_info *osi, tree ptr)
789 {
790 int object_size_type = osi->object_size_type;
791 unsigned int varno = SSA_NAME_VERSION (ptr);
792 unsigned HOST_WIDE_INT bytes;
793
794 gcc_assert (object_sizes[object_size_type][varno]
795 != unknown[object_size_type]);
796 gcc_assert (osi->pass == 0);
797
798 bytes = unknown[object_size_type];
799
800 if ((object_size_type & 2) == 0)
801 {
802 if (object_sizes[object_size_type][varno] < bytes)
803 object_sizes[object_size_type][varno] = bytes;
804 }
805 else
806 {
807 if (object_sizes[object_size_type][varno] > bytes)
808 object_sizes[object_size_type][varno] = bytes;
809 }
810 }
811
812
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* DEST already unknown; merging cannot improve it.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets of at least half the address space are treated as pointer
     subtraction rather than addition; give up on DEST.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    /* Saturate at zero when OFFSET reaches past the end of ORIG.  */
    orig_bytes = (offset > orig_bytes)
		 ? HOST_WIDE_INT_0U : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      /* Maximum query: only increases matter.  */
      if (object_sizes[object_size_type][varno] < orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  else
    {
      /* Minimum query: only decreases matter.  */
      if (object_sizes[object_size_type][varno] > orig_bytes)
	{
	  object_sizes[object_size_type][varno] = orig_bytes;
	  osi->changed = true;
	}
    }
  /* If ORIG is itself in a dependency cycle, DEST may change when ORIG
     is reexamined.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
858
859
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      /* VAR = &MEM[ptr + off] is handled like ptr + off.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
	{
	  unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

	  /* op0 will be ADDR_EXPR here.  */
	  addr_object_size (osi, op0, object_size_type, &bytes);
	  if (bytes == unknown[object_size_type])
	    ;
	  else if (off > offset_limit)
	    /* Offsets past the limit count as pointer subtraction.  */
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
	    /* Addition past the end of the object: zero bytes remain.  */
	    bytes = 0;
	  else
	    bytes -= off;
	}
    }
  else
    bytes = unknown[object_size_type];

  /* Grow the recorded size for maximum queries, shrink it for minimum
     ones.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
930
931
932 /* Compute object_sizes for VAR, defined at STMT, which is
933 a COND_EXPR. Return true if the object size might need reexamination
934 later. */
935
936 static bool
937 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
938 {
939 tree then_, else_;
940 int object_size_type = osi->object_size_type;
941 unsigned int varno = SSA_NAME_VERSION (var);
942 bool reexamine = false;
943
944 gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
945
946 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
947 return false;
948
949 then_ = gimple_assign_rhs2 (stmt);
950 else_ = gimple_assign_rhs3 (stmt);
951
952 if (TREE_CODE (then_) == SSA_NAME)
953 reexamine |= merge_object_sizes (osi, var, then_, 0);
954 else
955 expr_object_size (osi, var, then_);
956
957 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
958 return reexamine;
959
960 if (TREE_CODE (else_) == SSA_NAME)
961 reexamine |= merge_object_sizes (osi, var, else_, 0);
962 else
963 expr_object_size (osi, var, else_);
964
965 return reexamine;
966 }
967
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit: seed with the identity of the merge operation
	     (all-ones for minimum queries, zero for maximum ones).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  /* Dispatch on the kind of defining statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* Calls that return one of their arguments propagate that
	   argument's size; others are handled via alloc_size.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Merge the sizes of all PHI arguments, stopping early once
	   the size degrades to unknown.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      /* The size is final; never revisit this name.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1128
1129
/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  DEPTH counts (plus one) how many non-zero pointer
   additions lie on the path walked so far; a variable reached again at
   a different depth therefore sits on a cycle that keeps growing the
   pointer.  osi->depths and the osi->stack/tos array record the
   variables on the current walk so the offending cycle can be unwound.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the current walk.  If we reach it at a
	 different depth, the cycle contains a non-zero addition.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Pop stack entries
	     back to VAR and force their minimum object size to 0, since
	     the pointer can be advanced arbitrarily far.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only variables awaiting re-examination can participate in a
       dependency loop; anything else is already final.  */
    return;

  /* Push VAR onto the walk.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    /* Plain copy: recurse at the same depth.  */
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A non-zero addition increases the depth, which is what
	       makes a growing cycle detectable above.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Follow every incoming edge of the PHI.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1228
1229
1230 /* Check if some pointer we are computing object size of is being increased
1231 within a loop. If yes, assume all the SSA variables participating in
1232 that loop have minimum object sizes 0. */
1233
1234 static void
1235 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1236 {
1237 gimple *stmt = SSA_NAME_DEF_STMT (var);
1238
1239 /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1240 and looked for a POINTER_PLUS_EXPR in the pass-through
1241 argument, if any. In GIMPLE, however, such an expression
1242 is not a valid call operand. */
1243
1244 if (is_gimple_assign (stmt)
1245 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1246 {
1247 tree basevar = gimple_assign_rhs1 (stmt);
1248 tree cst = gimple_assign_rhs2 (stmt);
1249
1250 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1251
1252 if (integer_zerop (cst))
1253 return;
1254
1255 osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1256 *osi->tos++ = SSA_NAME_VERSION (basevar);
1257 check_for_plus_in_loops_1 (osi, var, 2);
1258 osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1259 osi->tos--;
1260 }
1261 }
1262
1263
1264 /* Initialize data structures for the object size computation. */
1265
1266 void
1267 init_object_sizes (void)
1268 {
1269 int object_size_type;
1270
1271 if (computed[0])
1272 return;
1273
1274 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1275 {
1276 object_sizes[object_size_type].safe_grow (num_ssa_names);
1277 computed[object_size_type] = BITMAP_ALLOC (NULL);
1278 }
1279
1280 init_offset_limit ();
1281 }
1282
1283
1284 /* Destroy data structures after the object size computation. */
1285
1286 void
1287 fini_object_sizes (void)
1288 {
1289 int object_size_type;
1290
1291 for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1292 {
1293 object_sizes[object_size_type].release ();
1294 BITMAP_FREE (computed[object_size_type]);
1295 }
1296 }
1297
1298
1299 /* Simple pass to optimize all __builtin_object_size () builtins. */
1300
1301 namespace {
1302
/* Metadata for the objsz pass: a GIMPLE pass that requires CFG and SSA
   form and requests no TODO actions.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1315
/* The objsz pass itself.  The pass is instantiated more than once (via
   clone); the single pass parameter selects between folding the builtin
   outright and merely bounding it with a MIN/MAX expression.  */
class pass_object_sizes : public gimple_opt_pass
{
 public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* Single boolean pass parameter: sets insert_min_max_p below.  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
1336
/* Dummy valueize function: returns T unchanged.  Passed to
   gimple_fold_stmt_to_constant below, which requires a valueization
   callback; this one performs no SSA name substitution.  */

static tree
do_valueize (tree t)
{
  return t;
}
1344
/* Main pass body: walk every statement of FUN and fold each
   __builtin_object_size call, either to a constant or (when
   insert_min_max_p) to a MIN/MAX of the call result and the computed
   bound.  Always returns 0 (no extra TODO flags).  */

unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily set up the computation tables on the first builtin
	     seen; torn down once at the end of the pass.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes;
		      if (compute_builtin_object_size (ptr, object_size_type,
						       &bytes)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call's result into a fresh SSA
			     name and define the old LHS as
			     MIN/MAX (call result, computed bound).
			     Type 1 takes the minimum, type 3 the
			     maximum.  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      /* Folding failed: substitute the documented fallback value
	         for the size type — (size_t)-1 for maximum-size queries
	         (types 0 and 1), 0 for minimum-size queries (2 and 3).  */
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    replace_uses_by (lhs, result);
	  else
	    replace_call_with_value (&i, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1446
1447 } // anon namespace
1448
/* Factory used by the pass manager to instantiate the objsz pass.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}